Dec 04 15:02:28 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 04 15:02:28 crc restorecon[4720]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 04 15:02:28 crc restorecon[4720]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 15:02:28 crc 
restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 15:02:28 crc 
restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 
15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 15:02:28 crc 
restorecon[4720]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 
15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 
15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc 
restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947
Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 15:02:29 crc restorecon[4720]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 15:02:29 crc restorecon[4720]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Dec 04 15:02:29 crc kubenswrapper[4946]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 04 15:02:29 crc kubenswrapper[4946]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Dec 04 15:02:29 crc kubenswrapper[4946]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 04 15:02:29 crc kubenswrapper[4946]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 04 15:02:29 crc kubenswrapper[4946]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 04 15:02:29 crc kubenswrapper[4946]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.272197 4946 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275048 4946 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275058 4946 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275064 4946 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275069 4946 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275073 4946 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275078 4946 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275083 4946 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275088 4946 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275092 4946 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275096 4946 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275100 4946 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275104 4946 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275107 4946 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275111 4946 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275129 4946 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275133 4946 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275136 4946 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275140 4946 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275144 4946 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275148 4946 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275152 4946 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275155 4946 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275159 4946 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275163 4946 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275166 4946 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275170 4946 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275173 4946 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275184 4946 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275191 4946 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275194 4946 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275198 4946 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275202 4946 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275205 4946 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275210 4946 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275216 4946 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275221 4946 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275225 4946 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275229 4946 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275233 4946 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275236 4946 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275240 4946 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275244 4946 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275248 4946 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275252 4946 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275256 4946 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275259 4946 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275263 4946 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275267 4946 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275271 4946 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275274 4946 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275278 4946 feature_gate.go:330] unrecognized feature gate: Example
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275285 4946 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275288 4946 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275293 4946 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275296 4946 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275302 4946 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275306 4946 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275312 4946 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275316 4946 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275319 4946 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275323 4946 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275326 4946 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275330 4946 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275333 4946 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275337 4946 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275341 4946 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275345 4946 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275349 4946 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275352 4946 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275355 4946 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.275361 4946 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.278972 4946 flags.go:64] FLAG: --address="0.0.0.0"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.278992 4946 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279005 4946 flags.go:64] FLAG: --anonymous-auth="true"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279014 4946 flags.go:64] FLAG: --application-metrics-count-limit="100"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279020 4946 flags.go:64] FLAG: --authentication-token-webhook="false"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279025 4946 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279033 4946 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279039 4946 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279043 4946 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279047 4946 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279052 4946 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279056 4946 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279060 4946 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279064 4946 flags.go:64] FLAG: --cgroup-root=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279068 4946 flags.go:64] FLAG: --cgroups-per-qos="true"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279072 4946 flags.go:64] FLAG: --client-ca-file=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279077 4946 flags.go:64] FLAG: --cloud-config=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279081 4946 flags.go:64] FLAG: --cloud-provider=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279084 4946 flags.go:64] FLAG: --cluster-dns="[]"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279092 4946 flags.go:64] FLAG: --cluster-domain=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279096 4946 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279101 4946 flags.go:64] FLAG: --config-dir=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279105 4946 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279109 4946 flags.go:64] FLAG: --container-log-max-files="5"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279129 4946 flags.go:64] FLAG: --container-log-max-size="10Mi"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279135 4946 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279140 4946 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279145 4946 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279149 4946 flags.go:64] FLAG: --contention-profiling="false"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279153 4946 flags.go:64] FLAG: --cpu-cfs-quota="true"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279157 4946 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279161 4946 flags.go:64] FLAG: --cpu-manager-policy="none"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279166 4946 flags.go:64] FLAG: --cpu-manager-policy-options=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279172 4946 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279176 4946 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279180 4946 flags.go:64] FLAG: --enable-debugging-handlers="true"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279184 4946 flags.go:64] FLAG: --enable-load-reader="false"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279189 4946 flags.go:64] FLAG: --enable-server="true"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279193 4946 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279199 4946 flags.go:64] FLAG: --event-burst="100"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279204 4946 flags.go:64] FLAG: --event-qps="50"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279208 4946 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279213 4946 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279217 4946 flags.go:64] FLAG: --eviction-hard=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279222 4946 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279227 4946 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279231 4946 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279235 4946 flags.go:64] FLAG: --eviction-soft=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279239 4946 flags.go:64] FLAG: --eviction-soft-grace-period=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279244 4946 flags.go:64] FLAG: --exit-on-lock-contention="false"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279247 4946 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279253 4946 flags.go:64] FLAG: --experimental-mounter-path=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279257 4946 flags.go:64] FLAG: --fail-cgroupv1="false"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279262 4946 flags.go:64] FLAG: --fail-swap-on="true"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279268 4946 flags.go:64] FLAG: --feature-gates=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279276 4946 flags.go:64] FLAG: --file-check-frequency="20s"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279281 4946 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279289 4946 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279296 4946 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279301 4946 flags.go:64] FLAG: --healthz-port="10248"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279306 4946 flags.go:64] FLAG: --help="false"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279311 4946 flags.go:64] FLAG: --hostname-override=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279316 4946 flags.go:64] FLAG: --housekeeping-interval="10s"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279321 4946 flags.go:64] FLAG: --http-check-frequency="20s"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279326 4946 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279331 4946 flags.go:64] FLAG: --image-credential-provider-config=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279336 4946 flags.go:64] FLAG: --image-gc-high-threshold="85"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279342 4946 flags.go:64] FLAG: --image-gc-low-threshold="80"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279347 4946 flags.go:64] FLAG: --image-service-endpoint=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279352 4946 flags.go:64] FLAG: --kernel-memcg-notification="false"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279356 4946 flags.go:64] FLAG: --kube-api-burst="100"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279361 4946 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279367 4946 flags.go:64] FLAG: --kube-api-qps="50"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279374 4946 flags.go:64] FLAG: --kube-reserved=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279379 4946 flags.go:64] FLAG: --kube-reserved-cgroup=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279384 4946 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279397 4946 flags.go:64] FLAG: --kubelet-cgroups=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279402 4946 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279407 4946 flags.go:64] FLAG: --lock-file=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279415 4946 flags.go:64] FLAG: --log-cadvisor-usage="false"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279420 4946 flags.go:64] FLAG: --log-flush-frequency="5s"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279425 4946 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279432 4946 flags.go:64] FLAG: --log-json-split-stream="false"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279437 4946 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279444 4946 flags.go:64] FLAG: --log-text-split-stream="false"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279448 4946 flags.go:64] FLAG: --logging-format="text"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279452 4946 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279457 4946 flags.go:64] FLAG: --make-iptables-util-chains="true"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279462 4946 flags.go:64] FLAG: --manifest-url=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279476 4946 flags.go:64] FLAG: --manifest-url-header=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279483 4946 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279488 4946 flags.go:64] FLAG: --max-open-files="1000000"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279494 4946 flags.go:64] FLAG: --max-pods="110"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279499 4946 flags.go:64] FLAG: --maximum-dead-containers="-1"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279504 4946 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279509 4946 flags.go:64] FLAG: --memory-manager-policy="None"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279513 4946 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279518 4946 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279523 4946 flags.go:64] FLAG: --node-ip="192.168.126.11"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279527 4946 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279542 4946 flags.go:64] FLAG: --node-status-max-images="50"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279546 4946 flags.go:64] FLAG: --node-status-update-frequency="10s"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279551 4946 flags.go:64] FLAG: --oom-score-adj="-999"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279555 4946 flags.go:64] FLAG: --pod-cidr=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279560 4946 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279569 4946 flags.go:64] FLAG: --pod-manifest-path=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279574 4946 flags.go:64] FLAG: --pod-max-pids="-1"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279578 4946 flags.go:64] FLAG: --pods-per-core="0"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279583 4946 flags.go:64] FLAG: --port="10250"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279589 4946 flags.go:64] FLAG: --protect-kernel-defaults="false"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279594 4946 flags.go:64] FLAG: --provider-id=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279600 4946 flags.go:64] FLAG: --qos-reserved=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279605 4946 flags.go:64] FLAG: --read-only-port="10255"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279609 4946 flags.go:64] FLAG: --register-node="true"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279614 4946 flags.go:64] FLAG: --register-schedulable="true"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279618 4946 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279627 4946 flags.go:64] FLAG: --registry-burst="10"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279631 4946 flags.go:64] FLAG: --registry-qps="5"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279636 4946 flags.go:64] FLAG: --reserved-cpus=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279640 4946 flags.go:64] FLAG: --reserved-memory=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279647 4946 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279651 4946 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279658 4946 flags.go:64] FLAG: --rotate-certificates="false"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279662 4946 flags.go:64] FLAG: --rotate-server-certificates="false"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279666 4946 flags.go:64] FLAG: --runonce="false"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279671 4946 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279675 4946 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279680 4946 flags.go:64] FLAG: --seccomp-default="false"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279684 4946 flags.go:64] FLAG: --serialize-image-pulls="true"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279688 4946 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279693 4946 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279697 4946 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279702 4946 flags.go:64] FLAG: --storage-driver-password="root"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279706 4946 flags.go:64] FLAG: --storage-driver-secure="false"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279710 4946 flags.go:64] FLAG: --storage-driver-table="stats"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279714 4946 flags.go:64] FLAG: --storage-driver-user="root"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279719 4946 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279723 4946 flags.go:64] FLAG: --sync-frequency="1m0s"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279727 4946 flags.go:64] FLAG: --system-cgroups=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279731 4946 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279738 4946 flags.go:64] FLAG: --system-reserved-cgroup=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279742 4946 flags.go:64] FLAG: --tls-cert-file=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279746 4946 flags.go:64] FLAG: --tls-cipher-suites="[]"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279753 4946 flags.go:64] FLAG: --tls-min-version=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279757 4946 flags.go:64] FLAG: --tls-private-key-file=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279761 4946 flags.go:64] FLAG: --topology-manager-policy="none"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279766 4946 flags.go:64] FLAG: --topology-manager-policy-options=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279770 4946 flags.go:64] FLAG: --topology-manager-scope="container"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279775 4946 flags.go:64] FLAG: --v="2"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279781 4946 flags.go:64] FLAG: --version="false"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279788 4946 flags.go:64] FLAG: --vmodule=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279794 4946 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.279799 4946 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.279927 4946 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.279934 4946 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.279938 4946 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.279942 4946 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.279947 4946 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.279952 4946 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.279956 4946 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.279960 4946 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.279963 4946 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.279967 4946 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.279971 4946 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.279974 4946 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.279978 4946 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.279981 4946 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.279986 4946 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.279990 4946 feature_gate.go:330] unrecognized feature gate: Example
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.279995 4946 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.279999 4946 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280002 4946 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280006 4946 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280009 4946 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280013 4946 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280016 4946 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280020 4946 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280023 4946 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280027 4946 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280030 4946 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280034 4946 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280038 4946 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280041 4946 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280045 4946 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280048 4946 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280052 4946 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280056 4946 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280060 4946 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280064 4946 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280067 4946 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280070 4946 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280074 4946 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280077 4946 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280082 4946 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280085 4946 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280088 4946 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280092 4946 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280095 4946 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280099 4946 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280102 4946 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280106 4946 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280110 4946 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280116 4946 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280134 4946 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280138 4946 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280142 4946 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280147 4946 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280150 4946 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280154 4946 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280158 4946 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280162 4946 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280165 4946 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280169 4946 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280172 4946 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280176 4946 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280180 4946 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280183 4946 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280188 4946 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280194 4946 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280198 4946 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280201 4946 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280205 4946 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280208 4946 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.280212 4946 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.280228 4946 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.290590 4946 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.290634 4946 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290734 4946 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290751 4946 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290791 4946 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290798 4946 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290806 4946 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290812 4946 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290820 4946 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290829 4946 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290835 4946 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290843 4946 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290849 4946 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290855 4946 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290861 4946 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290867 4946 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290871 4946 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290876 4946 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290883 4946 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290892 4946 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290897 4946 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290902 4946 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290907 4946 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290913 4946 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290917 4946 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290922 4946 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290927 4946 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290932 4946 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290937 4946 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290942 4946 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290946 4946 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290951 4946 feature_gate.go:330] unrecognized feature gate: Example
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290957 4946 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290962 4946 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290966 4946 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290971 4946 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290978 4946 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290983 4946 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290988 4946 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290993 4946 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.290998 4946 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291003 4946 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291009 4946 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291015 4946 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291021 4946 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291026 4946 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291032 4946 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291037 4946 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291043 4946 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291048 4946 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291053 4946 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291059 4946 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291065 4946 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291070 4946 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291075 4946 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291080 4946 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291085 4946 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291090 4946 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291094 4946 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291099 4946 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291105 4946 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291110 4946 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291154 4946 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291166 4946 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291173 4946 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291179 4946 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291208 4946 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291215 4946 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291221 4946 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291273 4946 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291285 4946 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291295 4946 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291303 4946 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.291315 4946 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291483 4946 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291504 4946 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291511 4946 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291519 4946 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291525 4946 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291531 4946 feature_gate.go:330] unrecognized feature gate: Example
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291537 4946 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291543 4946 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291550 4946 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291558 4946 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291564 4946 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291571 4946 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291577 4946 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291583 4946 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291590 4946 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291596 4946 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291602 4946 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291608 4946 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291613 4946 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291619 4946 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291623 4946 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291628 4946 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291633 4946 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291637 4946 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291642 4946 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291647 4946 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291652 4946 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291657 4946 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291661 4946 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291667 4946 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291672 4946 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291677 4946 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291682 4946 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291686 4946 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291693 4946 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291700 4946 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291706 4946 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291711 4946 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291716 4946 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291721 4946 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291727 4946 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291732 4946 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291736 4946 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291741 4946 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291746 4946 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291751 4946 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291756 4946 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291760 4946 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291765 4946 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291770 4946 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291775 4946 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291780 4946 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291786 4946 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291792 4946 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291797 4946 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291803 4946 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291808 4946 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291813 4946 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291818 4946 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291823 4946 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291828 4946 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291834 4946 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291839 4946 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291843 4946 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291849 4946 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291855 4946 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291861 4946 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291866 4946 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291872 4946 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291877 4946 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.291883 4946 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.291892 4946 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.292187 4946 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.295810 4946 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.295927 4946 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.297081    4946 server.go:997] "Starting client certificate rotation"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.297108    4946 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.297633    4946 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-08 03:27:51.679430759 +0000 UTC
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.297780    4946 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.307112    4946 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.309489    4946 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 04 15:02:29 crc kubenswrapper[4946]: E1204 15:02:29.314526    4946 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.318695    4946 log.go:25] "Validated CRI v1 runtime API"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.334035    4946 log.go:25] "Validated CRI v1 image API"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.335724    4946 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.338571    4946 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-04-14-57-16-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.338611    4946 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.382786    4946 manager.go:217] Machine: {Timestamp:2025-12-04 15:02:29.381350274 +0000 UTC m=+0.267393945 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:0d5ec34d-e409-4ecd-b977-fe4455c38295 BootID:8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7 Filesystems:[{Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:ea:31:92 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:ea:31:92 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:41:07:ee Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:87:75:2b Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:f7:ef:e2 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:69:97:09 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:8e:85:25:c3:4d:e5 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:06:9e:53:72:cc:2e Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.383085    4946 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.383237    4946 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.383738    4946 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.383944    4946 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.383982    4946 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.384228    4946 topology_manager.go:138] "Creating topology manager with none policy"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.384241    4946 container_manager_linux.go:303] "Creating device plugin manager"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.384484    4946 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.384529    4946 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.384691    4946 state_mem.go:36] "Initialized new in-memory state store"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.385076    4946 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.385733    4946 kubelet.go:418] "Attempting to sync node with API server"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.385755    4946 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.385796    4946 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.385810    4946 kubelet.go:324] "Adding apiserver pod source"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.385825    4946 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.387776    4946 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.388245    4946 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.388246    4946 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused
Dec 04 15:02:29 crc kubenswrapper[4946]: E1204 15:02:29.388403    4946 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError"
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.388491    4946 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused
Dec 04 15:02:29 crc kubenswrapper[4946]: E1204 15:02:29.388938    4946 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.391168    4946 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.393328    4946 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.393390    4946 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.393412    4946 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.393430    4946 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.393462    4946 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.393482    4946 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.393500    4946 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.393530    4946 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.393552    4946 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.393569    4946 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.393590    4946 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.393605    4946 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.393651    4946 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.394567    4946 server.go:1280] "Started kubelet"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.394741    4946 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.395183    4946 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.395166    4946 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.395860    4946 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 04 15:02:29 crc systemd[1]: Started Kubernetes Kubelet.
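[Annotation] The nodeConfig dump above carries the eviction policy the container manager is built with: HardEvictionThresholds such as memory.available < 100Mi (an absolute quantity) and nodefs.available < 10% (a fraction of capacity). A minimal Go sketch of how such a threshold evaluates, assuming the signal values have already been collected; the types and field names are illustrative, not kubelet's.

    // evict.go: illustrative hard-eviction threshold check.
    package main

    import "fmt"

    type Threshold struct {
        Signal     string
        Quantity   int64   // absolute bytes; used when Percentage == 0
        Percentage float64 // fraction of capacity, e.g. 0.1 for 10%
    }

    // evict reports whether the observed available amount is below the
    // threshold, resolving a percentage against capacity first.
    func evict(t Threshold, available, capacity int64) bool {
        limit := t.Quantity
        if t.Percentage > 0 {
            limit = int64(t.Percentage * float64(capacity))
        }
        return available < limit
    }

    func main() {
        memory := Threshold{Signal: "memory.available", Quantity: 100 << 20} // 100Mi
        nodefs := Threshold{Signal: "nodefs.available", Percentage: 0.1}     // 10%
        fmt.Println(evict(memory, 50<<20, 32<<30)) // true: 50Mi is below 100Mi
        fmt.Println(evict(nodefs, 20<<30, 85<<30)) // false: 20Gi is above 10% of 85Gi
    }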
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.397557    4946 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.397609    4946 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.397658    4946 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 01:22:02.790232029 +0000 UTC
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.397918    4946 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 250h19m33.392324419s for next certificate rotation
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.398019    4946 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.398038    4946 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.398179    4946 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 04 15:02:29 crc kubenswrapper[4946]: E1204 15:02:29.398191    4946 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.399428    4946 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused
Dec 04 15:02:29 crc kubenswrapper[4946]: E1204 15:02:29.399518    4946 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError"
Dec 04 15:02:29 crc kubenswrapper[4946]: E1204 15:02:29.399991    4946 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="200ms"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.402526    4946 server.go:460] "Adding debug handlers to kubelet server"
Dec 04 15:02:29 crc kubenswrapper[4946]: E1204 15:02:29.402297    4946 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.220:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e0b51acc26378 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-04 15:02:29.394490232 +0000 UTC m=+0.280533913,LastTimestamp:2025-12-04 15:02:29.394490232 +0000 UTC m=+0.280533913,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.403993    4946 factory.go:55] Registering systemd factory
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.404023    4946 factory.go:221] Registration of the systemd container factory successfully
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.404399    4946 factory.go:153] Registering CRI-O factory
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.404430    4946 factory.go:221] Registration of the crio container factory successfully
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.405190    4946 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.405246    4946 factory.go:103] Registering Raw factory
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.405266    4946 manager.go:1196] Started watching for new ooms in manager
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.406260    4946 manager.go:319] Starting recovery of all containers
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407066    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407171    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407197    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407212    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407231    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407244    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407258    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407276    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407291    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407308    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407321    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407338    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407350    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407372    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407384    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407401    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407418    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407433    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407450    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407465    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407483    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407497    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407510    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407528    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407540    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407558    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407575    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407601    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407616    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.407672    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408503    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408519    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408543    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408560    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408605    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408625    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408638    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408655    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408670    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408683    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408700    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408712    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408727    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408740    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408754    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408781    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408795    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408810    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408826    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408840    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408856    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408868    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408895    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408915    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408931    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408948    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408969    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408984    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.408997    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409015    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409028    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409044    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409058    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409070    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409089    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409101    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409148    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409165    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409180    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409197    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409212    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409228    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409242    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409256    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409272    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409285    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409300    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409314    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409327    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409384    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409401    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409418    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409431    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409444    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409464    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409477    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409495    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409509    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409523    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409540    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409554    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409567    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409584    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409598    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409614    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409626    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409640    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409658    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409671    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409692    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409706    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409719    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409737    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409750    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409774    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409794    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409813    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409832    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409850    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409867    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409886    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409904    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409920    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409940    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409953    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409975    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.409987    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410003    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410017    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410029    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410044    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410057    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410073    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410087    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410100    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410137    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410150    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410167    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410186    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410202    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410218    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410231    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410245    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410263    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410276    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410296    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410309    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410323    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410340    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410353    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410372    4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod=""
podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410386 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410399 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410417 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410431 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410447 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410520 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410536 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410552 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410564 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410578 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410593 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410605 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410621 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410634 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410655 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410672 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410685 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410701 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410713 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410726 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410742 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410755 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" 
volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410772 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410785 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410800 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410816 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410830 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410850 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410863 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410877 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410892 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410907 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410927 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410940 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410955 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410971 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.410985 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.411004 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.411018 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.411031 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.411047 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.411068 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.411084 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.411097 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" 
volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.411162 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.411183 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.411199 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.411214 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.411229 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.411242 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414595 4946 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414648 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414676 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414695 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414714 4946 reconstruct.go:130] "Volume is marked as uncertain and added 
into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414732 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414747 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414766 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414780 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414794 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414809 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414823 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414836 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414849 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414863 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414876 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414889 4946 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414901 4946 reconstruct.go:97] "Volume reconstruction finished" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.414909 4946 reconciler.go:26] "Reconciler: start to sync state" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.425251 4946 manager.go:324] Recovery completed Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.440262 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.442823 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.442918 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.442934 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.444256 4946 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.444277 4946 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.444330 4946 state_mem.go:36] "Initialized new in-memory state store" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.448872 4946 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.451342 4946 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.451411 4946 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.451450 4946 kubelet.go:2335] "Starting kubelet main sync loop" Dec 04 15:02:29 crc kubenswrapper[4946]: E1204 15:02:29.451520 4946 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 04 15:02:29 crc kubenswrapper[4946]: E1204 15:02:29.499247 4946 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 15:02:29 crc kubenswrapper[4946]: E1204 15:02:29.552210 4946 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 04 15:02:29 crc kubenswrapper[4946]: E1204 15:02:29.599892 4946 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 15:02:29 crc kubenswrapper[4946]: E1204 15:02:29.601888 4946 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="400ms" Dec 04 15:02:29 crc kubenswrapper[4946]: W1204 15:02:29.636862 4946 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Dec 04 15:02:29 crc kubenswrapper[4946]: E1204 15:02:29.637009 4946 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.690603 4946 policy_none.go:49] "None policy: Start" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.691831 4946 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.691889 4946 state_mem.go:35] "Initializing new in-memory state store" Dec 04 15:02:29 crc kubenswrapper[4946]: E1204 15:02:29.700157 4946 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 15:02:29 crc kubenswrapper[4946]: E1204 15:02:29.752366 4946 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.798356 4946 manager.go:334] "Starting Device Plugin manager" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.798409 4946 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.798424 4946 server.go:79] "Starting device plugin registration server" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.798811 4946 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.798829 4946 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 04 
15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.799164 4946 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.799273 4946 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.799297 4946 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 04 15:02:29 crc kubenswrapper[4946]: E1204 15:02:29.805540 4946 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.899198 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.900448 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.900484 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.900492 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:29 crc kubenswrapper[4946]: I1204 15:02:29.900516 4946 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 15:02:29 crc kubenswrapper[4946]: E1204 15:02:29.901048 4946 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.220:6443: connect: connection refused" node="crc" Dec 04 15:02:30 crc kubenswrapper[4946]: E1204 15:02:30.003366 4946 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="800ms" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.101831 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.103346 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.103439 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.103452 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.103489 4946 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 15:02:30 crc kubenswrapper[4946]: E1204 15:02:30.104271 4946 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.220:6443: connect: connection refused" node="crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.153186 4946 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.154620 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.154649 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.154659 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.154761 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.155021 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.155055 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.155522 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.155565 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.155577 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.155772 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.155800 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.155810 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.155993 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.156139 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.156168 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.156986 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.157046 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.157006 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.157073 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.157056 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.157082 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.157724 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.158111 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.158270 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.158883 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.158911 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.158921 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.159065 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.159533 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.159568 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.160588 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.160661 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.160681 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.161240 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.161679 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.161781 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.161856 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.161692 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.162591 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.162699 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.161844 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.163784 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.163831 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.163841 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.224662 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.224698 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.224715 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.224733 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.224752 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.224767 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.224783 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.224800 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.224833 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.224854 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.224875 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.224895 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.224987 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.225036 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod 
\"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.225055 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.326908 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327013 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327053 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327109 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327162 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327230 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327195 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327285 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327356 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327303 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327385 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327444 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327467 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327469 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327485 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327509 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327535 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327544 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327552 4946 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327585 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327650 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327675 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327707 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327680 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327719 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327678 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327770 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327787 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 15:02:30 crc 
kubenswrapper[4946]: I1204 15:02:30.327817 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.327914 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.396226 4946 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Dec 04 15:02:30 crc kubenswrapper[4946]: W1204 15:02:30.495990 4946 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.496077 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: E1204 15:02:30.496088 4946 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.505180 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.507253 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.507301 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.507313 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.507343 4946 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 15:02:30 crc kubenswrapper[4946]: E1204 15:02:30.507908 4946 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.220:6443: connect: connection refused" node="crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.513451 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.521797 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.546934 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: W1204 15:02:30.548336 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-50e6cad29654adf01c44f0ac0c070eed668676e80342887a7848d1b530a0814d WatchSource:0}: Error finding container 50e6cad29654adf01c44f0ac0c070eed668676e80342887a7848d1b530a0814d: Status 404 returned error can't find the container with id 50e6cad29654adf01c44f0ac0c070eed668676e80342887a7848d1b530a0814d Dec 04 15:02:30 crc kubenswrapper[4946]: I1204 15:02:30.549305 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 15:02:30 crc kubenswrapper[4946]: W1204 15:02:30.549592 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-f0faa44acdf87e9d5226e72e961415adc4a7168e43c4aa65ff94528a7a8acfed WatchSource:0}: Error finding container f0faa44acdf87e9d5226e72e961415adc4a7168e43c4aa65ff94528a7a8acfed: Status 404 returned error can't find the container with id f0faa44acdf87e9d5226e72e961415adc4a7168e43c4aa65ff94528a7a8acfed Dec 04 15:02:30 crc kubenswrapper[4946]: W1204 15:02:30.551521 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-e829e04a659eea1c204557d6a5ed8e1d207ff3fa59e42da26029e40a9060d177 WatchSource:0}: Error finding container e829e04a659eea1c204557d6a5ed8e1d207ff3fa59e42da26029e40a9060d177: Status 404 returned error can't find the container with id e829e04a659eea1c204557d6a5ed8e1d207ff3fa59e42da26029e40a9060d177 Dec 04 15:02:30 crc kubenswrapper[4946]: W1204 15:02:30.560529 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-b47a28205e65eac31731d4d0b039a4667e6d4b5ed5b9b7d0affb2b309d1398f2 WatchSource:0}: Error finding container b47a28205e65eac31731d4d0b039a4667e6d4b5ed5b9b7d0affb2b309d1398f2: Status 404 returned error can't find the container with id b47a28205e65eac31731d4d0b039a4667e6d4b5ed5b9b7d0affb2b309d1398f2 Dec 04 15:02:30 crc kubenswrapper[4946]: W1204 15:02:30.566971 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-a4464fd90540fb12933595b090e197f3946ec9cbab56095f10fffaa133a77631 WatchSource:0}: Error finding container a4464fd90540fb12933595b090e197f3946ec9cbab56095f10fffaa133a77631: Status 404 returned error can't find the container with id a4464fd90540fb12933595b090e197f3946ec9cbab56095f10fffaa133a77631 Dec 04 15:02:30 crc kubenswrapper[4946]: W1204 15:02:30.579211 4946 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Dec 04 15:02:30 crc kubenswrapper[4946]: E1204 15:02:30.579296 4946 
reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Dec 04 15:02:30 crc kubenswrapper[4946]: W1204 15:02:30.755791 4946 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Dec 04 15:02:30 crc kubenswrapper[4946]: E1204 15:02:30.755900 4946 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Dec 04 15:02:30 crc kubenswrapper[4946]: E1204 15:02:30.806763 4946 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="1.6s" Dec 04 15:02:30 crc kubenswrapper[4946]: W1204 15:02:30.904795 4946 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Dec 04 15:02:30 crc kubenswrapper[4946]: E1204 15:02:30.904859 4946 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Dec 04 15:02:30 crc kubenswrapper[4946]: E1204 15:02:30.933929 4946 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.220:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e0b51acc26378 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-04 15:02:29.394490232 +0000 UTC m=+0.280533913,LastTimestamp:2025-12-04 15:02:29.394490232 +0000 UTC m=+0.280533913,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.308376 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.331577 4946 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 04 15:02:31 crc kubenswrapper[4946]: E1204 15:02:31.332848 4946 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create 
certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.396269 4946 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.408215 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.408268 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.408279 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.408322 4946 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 15:02:31 crc kubenswrapper[4946]: E1204 15:02:31.409066 4946 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.220:6443: connect: connection refused" node="crc" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.460150 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f"} Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.460639 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b47a28205e65eac31731d4d0b039a4667e6d4b5ed5b9b7d0affb2b309d1398f2"} Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.461685 4946 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964" exitCode=0 Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.461797 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964"} Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.461878 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"50e6cad29654adf01c44f0ac0c070eed668676e80342887a7848d1b530a0814d"} Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.461994 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.463216 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.463262 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.463276 
4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.464899 4946 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5750447cd1d744a3c5125ac9cefa0c28729ffbab36f5656d859e784f33259c2c" exitCode=0 Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.465016 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5750447cd1d744a3c5125ac9cefa0c28729ffbab36f5656d859e784f33259c2c"} Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.465051 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f0faa44acdf87e9d5226e72e961415adc4a7168e43c4aa65ff94528a7a8acfed"} Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.465167 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.465204 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.466065 4946 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="d188cd5f6e35af6fe300e8761b2d502b44b37c0b2edc2f09754bf85fa67f0d57" exitCode=0 Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.466151 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"d188cd5f6e35af6fe300e8761b2d502b44b37c0b2edc2f09754bf85fa67f0d57"} Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.466179 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"e829e04a659eea1c204557d6a5ed8e1d207ff3fa59e42da26029e40a9060d177"} Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.466205 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.466232 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.466244 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.466252 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.466617 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.466645 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.466653 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.467068 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.467090 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.467100 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.469604 4946 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4" exitCode=0 Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.469663 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4"} Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.469690 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"a4464fd90540fb12933595b090e197f3946ec9cbab56095f10fffaa133a77631"} Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.469793 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.470823 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.470864 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:31 crc kubenswrapper[4946]: I1204 15:02:31.470877 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:32 crc kubenswrapper[4946]: I1204 15:02:32.395794 4946 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Dec 04 15:02:32 crc kubenswrapper[4946]: E1204 15:02:32.407700 4946 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="3.2s" Dec 04 15:02:32 crc kubenswrapper[4946]: I1204 15:02:32.476353 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ba178e72ed627de57f14e2824e6f3df502d1381b5f3cfa499956cbb491913244"} Dec 04 15:02:32 crc kubenswrapper[4946]: I1204 15:02:32.479795 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b"} Dec 04 15:02:32 crc kubenswrapper[4946]: I1204 15:02:32.487472 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8"} Dec 04 15:02:32 crc kubenswrapper[4946]: I1204 15:02:32.489792 4946 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="ee6c95868bfa5a0d4d2d388fc654ce7ffa793da975943c6fefb06e03cf6c60b9" exitCode=0 Dec 04 15:02:32 crc kubenswrapper[4946]: I1204 15:02:32.489872 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"ee6c95868bfa5a0d4d2d388fc654ce7ffa793da975943c6fefb06e03cf6c60b9"} Dec 04 15:02:32 crc kubenswrapper[4946]: I1204 15:02:32.491986 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"42aa2de3e954b29b1d06dbfa70cfce3fd4ab67429abbbd4ec07aac4284f32e36"} Dec 04 15:02:32 crc kubenswrapper[4946]: I1204 15:02:32.492173 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:32 crc kubenswrapper[4946]: I1204 15:02:32.493350 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:32 crc kubenswrapper[4946]: I1204 15:02:32.493392 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:32 crc kubenswrapper[4946]: I1204 15:02:32.493406 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:32 crc kubenswrapper[4946]: W1204 15:02:32.593431 4946 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Dec 04 15:02:32 crc kubenswrapper[4946]: E1204 15:02:32.593538 4946 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Dec 04 15:02:32 crc kubenswrapper[4946]: W1204 15:02:32.771642 4946 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Dec 04 15:02:32 crc kubenswrapper[4946]: E1204 15:02:32.771744 4946 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Dec 04 15:02:32 crc kubenswrapper[4946]: W1204 15:02:32.786779 4946 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Dec 04 15:02:32 crc kubenswrapper[4946]: E1204 15:02:32.786858 4946 
reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.009557 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.011084 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.011152 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.011163 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.011192 4946 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 15:02:33 crc kubenswrapper[4946]: E1204 15:02:33.011616 4946 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.220:6443: connect: connection refused" node="crc" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.502675 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df"} Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.502730 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4"} Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.502747 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563"} Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.502759 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d"} Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.502786 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.503805 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.503843 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.503852 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.505212 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9e555c36c9745da9c2a6c943ed8ed26354d90ca29c4760317dafd74573ffbe32"} Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.505265 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"487b17c1161cd918a231631762628de98ccd97dcfa9bdeeb371e5a7b75ebd541"} Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.505268 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.506967 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.507023 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.507045 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.508328 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b"} Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.508368 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.508374 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3"} Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.508380 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.508940 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.508971 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.508991 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.509265 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.509289 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.509300 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.575221 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:02:33 crc kubenswrapper[4946]: I1204 15:02:33.583158 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.512998 4946 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="adc53226b1d284cc01ce63df1ea5745393d749eba5486cbf3a848be8b60d2f2d" exitCode=0 Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.513161 4946 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.513197 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.513190 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"adc53226b1d284cc01ce63df1ea5745393d749eba5486cbf3a848be8b60d2f2d"} Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.513197 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.513199 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.513415 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.513274 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.514217 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.514254 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.514264 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.514401 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.514443 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.514460 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.515065 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.515095 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.515106 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.515206 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.515257 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:34 crc kubenswrapper[4946]: I1204 15:02:34.515269 
4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.519383 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"633b16af3f2dc44b943e36316eb8457af8cdf8e9d263b36974fb11155c8de93c"} Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.519473 4946 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.519477 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"918c2134164eceb0ba967ebd0159ce5c3d84a1f4e6b56dd9d97a2fb1736e421e"} Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.519497 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"9572530c7e88d9c7e687dfdb8aa78681aae7216645c9312212290e492d83e807"} Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.519530 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.519543 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.519669 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.520416 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"38611bce45609aecabae35e50df1ce29eb0b781cee771df33346effa66abfbcc"} Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.520439 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7a316f010d82bc045ac0d264da0be1d699823bec96a2306effb752fac7dfdd24"} Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.520836 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.520882 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.520897 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.520840 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.520932 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.520948 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.521721 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.521766 4946 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.521779 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.656910 4946 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.898167 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:02:35 crc kubenswrapper[4946]: I1204 15:02:35.923852 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.019549 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.212233 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.213910 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.213950 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.213964 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.213992 4946 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.522443 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.522530 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.522793 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.523778 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.523828 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.523841 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.523885 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.523919 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.523945 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.524733 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.524769 4946 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.524785 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.807256 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 04 15:02:36 crc kubenswrapper[4946]: I1204 15:02:36.908447 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 04 15:02:37 crc kubenswrapper[4946]: I1204 15:02:37.525573 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:37 crc kubenswrapper[4946]: I1204 15:02:37.525651 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:37 crc kubenswrapper[4946]: I1204 15:02:37.527061 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:37 crc kubenswrapper[4946]: I1204 15:02:37.527107 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:37 crc kubenswrapper[4946]: I1204 15:02:37.527137 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:37 crc kubenswrapper[4946]: I1204 15:02:37.527294 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:37 crc kubenswrapper[4946]: I1204 15:02:37.527326 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:37 crc kubenswrapper[4946]: I1204 15:02:37.527338 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:38 crc kubenswrapper[4946]: I1204 15:02:38.528191 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:38 crc kubenswrapper[4946]: I1204 15:02:38.528978 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:38 crc kubenswrapper[4946]: I1204 15:02:38.529005 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:38 crc kubenswrapper[4946]: I1204 15:02:38.529017 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:39 crc kubenswrapper[4946]: E1204 15:02:39.805704 4946 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 04 15:02:40 crc kubenswrapper[4946]: I1204 15:02:40.201419 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:02:40 crc kubenswrapper[4946]: I1204 15:02:40.201606 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:40 crc kubenswrapper[4946]: I1204 15:02:40.202742 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:40 crc kubenswrapper[4946]: I1204 15:02:40.202775 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" 
Dec 04 15:02:40 crc kubenswrapper[4946]: I1204 15:02:40.202788 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:40 crc kubenswrapper[4946]: I1204 15:02:40.206644 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:02:40 crc kubenswrapper[4946]: I1204 15:02:40.407878 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 15:02:40 crc kubenswrapper[4946]: I1204 15:02:40.408078 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:40 crc kubenswrapper[4946]: I1204 15:02:40.409303 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:40 crc kubenswrapper[4946]: I1204 15:02:40.409374 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:40 crc kubenswrapper[4946]: I1204 15:02:40.409411 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:40 crc kubenswrapper[4946]: I1204 15:02:40.534428 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:40 crc kubenswrapper[4946]: I1204 15:02:40.536019 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:40 crc kubenswrapper[4946]: I1204 15:02:40.536090 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:40 crc kubenswrapper[4946]: I1204 15:02:40.536106 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:41 crc kubenswrapper[4946]: I1204 15:02:41.700940 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:02:41 crc kubenswrapper[4946]: I1204 15:02:41.701390 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:41 crc kubenswrapper[4946]: I1204 15:02:41.703619 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:41 crc kubenswrapper[4946]: I1204 15:02:41.703688 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:41 crc kubenswrapper[4946]: I1204 15:02:41.703707 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:43 crc kubenswrapper[4946]: I1204 15:02:43.396566 4946 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 04 15:02:43 crc kubenswrapper[4946]: W1204 15:02:43.623740 4946 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 04 15:02:43 crc kubenswrapper[4946]: I1204 15:02:43.623837 4946 trace.go:236] Trace[1974814268]: "Reflector ListAndWatch" 
name:k8s.io/client-go/informers/factory.go:160 (04-Dec-2025 15:02:33.622) (total time: 10001ms): Dec 04 15:02:43 crc kubenswrapper[4946]: Trace[1974814268]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (15:02:43.623) Dec 04 15:02:43 crc kubenswrapper[4946]: Trace[1974814268]: [10.001394468s] [10.001394468s] END Dec 04 15:02:43 crc kubenswrapper[4946]: E1204 15:02:43.623859 4946 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 04 15:02:44 crc kubenswrapper[4946]: I1204 15:02:44.337149 4946 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 04 15:02:44 crc kubenswrapper[4946]: I1204 15:02:44.337254 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 04 15:02:44 crc kubenswrapper[4946]: I1204 15:02:44.342798 4946 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 04 15:02:44 crc kubenswrapper[4946]: I1204 15:02:44.342863 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 04 15:02:44 crc kubenswrapper[4946]: I1204 15:02:44.701221 4946 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 04 15:02:44 crc kubenswrapper[4946]: I1204 15:02:44.701302 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 04 15:02:45 crc kubenswrapper[4946]: I1204 15:02:45.904338 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:02:45 crc kubenswrapper[4946]: I1204 15:02:45.904967 4946 kubelet_node_status.go:401] "Setting node annotation 
to enable volume controller attach/detach" Dec 04 15:02:45 crc kubenswrapper[4946]: I1204 15:02:45.906985 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:45 crc kubenswrapper[4946]: I1204 15:02:45.907061 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:45 crc kubenswrapper[4946]: I1204 15:02:45.907086 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:45 crc kubenswrapper[4946]: I1204 15:02:45.910739 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:02:46 crc kubenswrapper[4946]: I1204 15:02:46.549979 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:46 crc kubenswrapper[4946]: I1204 15:02:46.550835 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:46 crc kubenswrapper[4946]: I1204 15:02:46.550914 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:46 crc kubenswrapper[4946]: I1204 15:02:46.550935 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:46 crc kubenswrapper[4946]: I1204 15:02:46.937396 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 04 15:02:46 crc kubenswrapper[4946]: I1204 15:02:46.938379 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:46 crc kubenswrapper[4946]: I1204 15:02:46.940575 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:46 crc kubenswrapper[4946]: I1204 15:02:46.940632 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:46 crc kubenswrapper[4946]: I1204 15:02:46.940654 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:46 crc kubenswrapper[4946]: I1204 15:02:46.954301 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 04 15:02:47 crc kubenswrapper[4946]: I1204 15:02:47.552724 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:47 crc kubenswrapper[4946]: I1204 15:02:47.553891 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:47 crc kubenswrapper[4946]: I1204 15:02:47.553932 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:47 crc kubenswrapper[4946]: I1204 15:02:47.553942 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:47 crc kubenswrapper[4946]: I1204 15:02:47.821689 4946 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.397486 4946 apiserver.go:52] "Watching apiserver" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.401711 4946 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 04 
15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.402025 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.402539 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.402554 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:02:48 crc kubenswrapper[4946]: E1204 15:02:48.402712 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.403952 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.404142 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.404266 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.404322 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:02:48 crc kubenswrapper[4946]: E1204 15:02:48.404657 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.404748 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 04 15:02:48 crc kubenswrapper[4946]: E1204 15:02:48.404766 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.406146 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.406248 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.406617 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.407181 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.407478 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.408403 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.408615 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.408836 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.437365 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.452875 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.465308 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.480376 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.493085 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.499687 4946 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.503332 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.511617 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:48 crc kubenswrapper[4946]: I1204 15:02:48.522291 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:49 crc kubenswrapper[4946]: E1204 15:02:49.339326 4946 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.341317 4946 trace.go:236] Trace[632757180]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (04-Dec-2025 15:02:36.122) (total time: 13218ms): Dec 04 15:02:49 crc kubenswrapper[4946]: Trace[632757180]: ---"Objects listed" error: 13218ms (15:02:49.341) Dec 04 15:02:49 crc kubenswrapper[4946]: Trace[632757180]: [13.218571694s] [13.218571694s] END Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.341369 4946 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.341336 4946 trace.go:236] Trace[1931027189]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (04-Dec-2025 15:02:38.266) (total time: 11074ms): Dec 04 15:02:49 crc kubenswrapper[4946]: Trace[1931027189]: ---"Objects listed" error: 11074ms (15:02:49.341) Dec 04 15:02:49 crc kubenswrapper[4946]: Trace[1931027189]: [11.074333258s] [11.074333258s] END Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.341603 4946 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 04 15:02:49 crc kubenswrapper[4946]: E1204 15:02:49.343540 4946 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.343608 4946 trace.go:236] Trace[1883482565]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (04-Dec-2025 15:02:37.145) (total time: 12198ms): Dec 04 15:02:49 crc kubenswrapper[4946]: Trace[1883482565]: ---"Objects listed" error: 12198ms (15:02:49.343) Dec 04 15:02:49 crc kubenswrapper[4946]: Trace[1883482565]: [12.198216365s] [12.198216365s] END Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.343647 4946 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.344536 4946 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 
15:02:49.347788 4946 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.395088 4946 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:50442->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.395204 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:50442->192.168.126.11:17697: read: connection reset by peer" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.395789 4946 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.395885 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.444916 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.444967 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.444994 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445017 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445039 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445062 4946 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445091 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445138 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445167 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445193 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445212 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445234 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445254 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445278 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445300 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 04 15:02:49 crc 
kubenswrapper[4946]: I1204 15:02:49.445329 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445353 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445377 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445402 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445430 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445476 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445512 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445544 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445547 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445575 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445595 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445656 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445686 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445709 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445724 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445742 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445756 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445821 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445866 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445894 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445901 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445945 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445957 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.445961 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446011 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446011 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). 
InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446038 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446065 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446090 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446131 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446158 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446170 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446181 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446204 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446227 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446251 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446273 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446319 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446339 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446340 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446375 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446394 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446415 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446447 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446476 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446500 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446482 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446523 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446617 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446630 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446628 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446687 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446714 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446739 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446763 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446787 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446810 4946 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446833 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446854 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446866 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446880 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446881 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446902 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446925 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446949 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446972 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446993 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447018 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447041 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447066 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447088 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447261 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447286 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447309 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447333 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447356 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447382 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447405 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447430 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447454 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447479 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447501 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447527 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447555 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446900 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447586 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.446981 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447006 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447074 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447085 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447655 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447173 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447226 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447318 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447323 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447697 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447349 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447370 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447467 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447495 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447448 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447572 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447620 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: E1204 15:02:49.447650 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:02:49.947589802 +0000 UTC m=+20.833633453 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447932 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.448012 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.448061 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.448083 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.448109 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.448347 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.448400 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.448604 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.449374 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.449765 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.450222 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.450425 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.450612 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.450805 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.450803 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.450879 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.451100 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.451105 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.452346 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.452381 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.452391 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.452586 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.452751 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.447582 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.453018 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.453165 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.453048 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.453285 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.452970 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.453421 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.453468 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.453284 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.453680 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.453983 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454000 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454027 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454046 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454082 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454095 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454139 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454183 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454205 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454302 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454389 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454425 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454466 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454495 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454520 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454546 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454571 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454650 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454655 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454704 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454730 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454766 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454800 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454821 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454844 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454865 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454886 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454905 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" 
(UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.454922 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455268 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455268 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455356 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455379 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455397 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455416 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455434 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455454 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455472 4946 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455488 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455514 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455532 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455551 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455567 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455584 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455602 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455622 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455641 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 04 15:02:49 crc 
kubenswrapper[4946]: I1204 15:02:49.455675 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455692 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455723 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455750 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455767 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455786 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455806 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455822 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455839 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455856 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 04 15:02:49 crc 
kubenswrapper[4946]: I1204 15:02:49.455877 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455897 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455917 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455934 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455950 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455967 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455985 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.456384 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.456410 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.456430 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.456446 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.456464 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.456487 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.456832 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.456856 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.456875 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.456899 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.456928 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.456951 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.456978 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: 
\"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457000 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457022 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457040 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457060 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457080 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457099 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457136 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457154 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457175 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457193 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: 
\"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457210 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457228 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457246 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457262 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457278 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457298 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457320 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457345 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457363 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457385 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" 
(UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457402 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457420 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457435 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457453 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457471 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457486 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457501 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457518 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457538 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457554 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457570 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457587 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457604 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457633 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457652 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457669 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457684 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457705 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457721 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457743 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457799 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457826 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457853 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457877 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.457900 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.458102 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.458427 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.458452 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: 
\"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.458475 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.458745 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.458769 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.458788 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.458811 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.458829 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.458917 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.458928 4946 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.458939 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.458949 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: 
\"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.458959 4946 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.458970 4946 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.458980 4946 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.458989 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.459660 4946 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.459672 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.459696 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.459707 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.459717 4946 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.459733 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.459747 4946 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.459757 4946 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.459767 4946 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.459778 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.459788 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.459801 4946 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.459811 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.459821 4946 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.459831 4946 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.459843 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.459859 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.459869 4946 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.461840 4946 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.461861 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.461872 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.461882 4946 
reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.461892 4946 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.461916 4946 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.461926 4946 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.461935 4946 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.461946 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.461956 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.461965 4946 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.461976 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.461987 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.461997 4946 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462009 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462019 4946 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc 
kubenswrapper[4946]: I1204 15:02:49.462029 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462038 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462050 4946 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462060 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462070 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462080 4946 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462089 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462099 4946 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462111 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462140 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462150 4946 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462160 4946 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462170 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 
crc kubenswrapper[4946]: I1204 15:02:49.462180 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462191 4946 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462200 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462210 4946 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462225 4946 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462234 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462244 4946 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462254 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462263 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462274 4946 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462283 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462292 4946 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462302 4946 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462312 4946 
reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462323 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462333 4946 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462343 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462353 4946 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.470191 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455481 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.455532 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.460238 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.461255 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.461482 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.461807 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462156 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462228 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462468 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.462870 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.460507 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.463089 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.463762 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.464265 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.465331 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.465360 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.465497 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.465607 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.465618 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.465739 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). 
InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.466112 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.466266 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.466618 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.466804 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.466830 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.467141 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.467159 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.467485 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). 
InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.467911 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.467970 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.468011 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.468038 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.468810 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.469602 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.469718 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.470250 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.467545 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.470771 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.472882 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.473218 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.473629 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.473962 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.474304 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.474533 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.474613 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.474640 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.475056 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.475245 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.475517 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.475531 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.475950 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.476069 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.476337 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.476601 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.476829 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.477052 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.477227 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.477544 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.477658 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.478356 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.480892 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.481593 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.481788 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.481824 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.481917 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.482108 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.482135 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.482134 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.482235 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.482732 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.482736 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.482947 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.483065 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.483095 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.483225 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.483264 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.483872 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.484374 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.484458 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.484480 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.485003 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.485174 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.484185 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: E1204 15:02:49.485344 4946 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 15:02:49 crc kubenswrapper[4946]: E1204 15:02:49.485637 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 15:02:49.985609399 +0000 UTC m=+20.871653230 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.488665 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.489107 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.489404 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.489540 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.490217 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.490768 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: E1204 15:02:49.493069 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 15:02:49 crc kubenswrapper[4946]: E1204 15:02:49.493191 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 15:02:49 crc kubenswrapper[4946]: E1204 15:02:49.493276 4946 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.493641 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: E1204 15:02:49.493762 4946 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 15:02:49 crc kubenswrapper[4946]: E1204 15:02:49.494206 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 15:02:49.994150125 +0000 UTC m=+20.880193776 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:49 crc kubenswrapper[4946]: E1204 15:02:49.494767 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-04 15:02:49.994749971 +0000 UTC m=+20.880793632 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.495337 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.495974 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.496141 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.496823 4946 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.498526 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.498567 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.499665 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.499769 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.501187 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.503078 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.501930 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.503844 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.505163 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.505268 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.503906 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.504428 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.506094 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.506603 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.506676 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.507255 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.507890 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.508739 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.508785 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.508889 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.508892 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.509065 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.510436 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.520685 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.521226 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.524217 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.524954 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.524988 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.526754 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 15:02:49 crc kubenswrapper[4946]: E1204 15:02:49.529752 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 15:02:49 crc kubenswrapper[4946]: E1204 15:02:49.529888 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 15:02:49 crc kubenswrapper[4946]: E1204 15:02:49.529987 4946 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:49 crc kubenswrapper[4946]: E1204 15:02:49.530151 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 15:02:50.030112466 +0000 UTC m=+20.916156107 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.531351 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.532493 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.533243 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.533446 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.533787 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.534770 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.535394 4946 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.536151 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.536340 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.536633 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.554047 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.555332 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.561176 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.563466 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.563906 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.563949 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564007 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564020 4946 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564033 4946 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564043 4946 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564053 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564065 4946 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564079 4946 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564092 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564104 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564184 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564134 4946 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564227 4946 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564242 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564254 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564266 4946 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564277 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564288 4946 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564301 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564318 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 
15:02:49.564330 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564343 4946 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564357 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564369 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564382 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564393 4946 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564404 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564415 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564426 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564438 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564450 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564461 4946 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564471 4946 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc 
kubenswrapper[4946]: I1204 15:02:49.564483 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564495 4946 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564507 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564517 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564528 4946 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564538 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564549 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564560 4946 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564571 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564583 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564596 4946 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564279 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564608 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" 
DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564662 4946 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564675 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564692 4946 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564703 4946 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564712 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564722 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564730 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564741 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564750 4946 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564759 4946 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564768 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564776 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564785 4946 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") 
on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564794 4946 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564803 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564812 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564821 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564830 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564839 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564847 4946 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564857 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564868 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564898 4946 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564909 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564919 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564929 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564938 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564966 4946 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564976 4946 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564999 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565009 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565019 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565029 4946 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565040 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565050 4946 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565061 4946 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565070 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565080 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565091 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: 
\"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565101 4946 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565111 4946 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565159 4946 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565169 4946 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565179 4946 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565190 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565200 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565209 4946 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565220 4946 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565234 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565243 4946 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565252 4946 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565261 4946 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565271 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565281 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565290 4946 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565302 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565312 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565321 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565331 4946 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565344 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565359 4946 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565368 4946 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565377 4946 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565386 4946 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565395 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 
04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565404 4946 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565413 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565425 4946 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565436 4946 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565448 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565460 4946 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565471 4946 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565479 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565488 4946 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565497 4946 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565507 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.564591 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.565587 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.566346 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.566885 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.571413 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.573337 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.576646 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.579400 4946 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df" exitCode=255 Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.585988 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.586929 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.594820 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.595404 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.604072 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.604213 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.604720 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.607895 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.609599 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.610510 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.611831 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.612585 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.613717 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.614762 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.615836 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.616326 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.616776 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.617674 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.617960 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.618476 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.619715 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.620334 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df"} Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.626336 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.633978 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.648025 4946 scope.go:117] "RemoveContainer" containerID="f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.648734 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.649517 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.666134 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.666169 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.671284 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.695194 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.713194 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.728526 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.746585 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 15:02:49 crc kubenswrapper[4946]: I1204 15:02:49.967501 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:02:49 crc kubenswrapper[4946]: E1204 15:02:49.967701 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:02:50.967668853 +0000 UTC m=+21.853712494 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.069012 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.069075 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.069107 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.069144 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:02:50 crc kubenswrapper[4946]: E1204 15:02:50.069293 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 15:02:50 crc kubenswrapper[4946]: E1204 15:02:50.069312 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 15:02:50 crc kubenswrapper[4946]: E1204 15:02:50.069323 4946 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:50 crc kubenswrapper[4946]: E1204 15:02:50.069355 4946 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 15:02:50 crc kubenswrapper[4946]: E1204 15:02:50.069390 4946 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 15:02:50 crc 
kubenswrapper[4946]: E1204 15:02:50.069363 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 15:02:50 crc kubenswrapper[4946]: E1204 15:02:50.069489 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 15:02:50 crc kubenswrapper[4946]: E1204 15:02:50.069506 4946 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:50 crc kubenswrapper[4946]: E1204 15:02:50.069393 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 15:02:51.069370555 +0000 UTC m=+21.955414196 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:50 crc kubenswrapper[4946]: E1204 15:02:50.069565 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 15:02:51.06955481 +0000 UTC m=+21.955598451 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 15:02:50 crc kubenswrapper[4946]: E1204 15:02:50.069583 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 15:02:51.069574651 +0000 UTC m=+21.955618292 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 15:02:50 crc kubenswrapper[4946]: E1204 15:02:50.069605 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 15:02:51.069596701 +0000 UTC m=+21.955640342 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.177888 4946 csr.go:261] certificate signing request csr-8qpnh is approved, waiting to be issued Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.216750 4946 csr.go:257] certificate signing request csr-8qpnh is issued Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.327224 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.452149 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.452233 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:02:50 crc kubenswrapper[4946]: E1204 15:02:50.452294 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.452236 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:02:50 crc kubenswrapper[4946]: E1204 15:02:50.452511 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:02:50 crc kubenswrapper[4946]: E1204 15:02:50.452654 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.584495 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.585909 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a"} Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.586077 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.587220 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575"} Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.587257 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245"} Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.587268 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"f001e49b38f5555b604ba061082c90fdf2ad038f58ea048d63a27f08df0a937b"} Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.590180 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"efd8cc8ab94cb983d97370c0255bb9f5a02ec119469ddaffe79517d27715160f"} Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.591416 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7"} Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.591451 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"2b4f6deef80b8771c2597efcdb8e2d248e7074b2691cf79c766abf0c6bb96f4d"} Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.616834 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:50Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.651196 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-s76w5"] Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.651503 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-s76w5" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.655774 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:50Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.660106 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.660106 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.664297 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.690629 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:50Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.705156 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:50Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.722857 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:50Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.738461 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:50Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.757474 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:50Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.772455 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:50Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.775169 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/bb0fa893-f2f4-4864-a154-fb91f3dc76f0-hosts-file\") pod \"node-resolver-s76w5\" (UID: \"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\") " pod="openshift-dns/node-resolver-s76w5" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.775225 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bl4st\" (UniqueName: \"kubernetes.io/projected/bb0fa893-f2f4-4864-a154-fb91f3dc76f0-kube-api-access-bl4st\") pod \"node-resolver-s76w5\" (UID: \"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\") " pod="openshift-dns/node-resolver-s76w5" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.786341 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:50Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.802581 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:50Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.816495 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:50Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.842955 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:50Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.869704 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:50Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.876398 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/bb0fa893-f2f4-4864-a154-fb91f3dc76f0-hosts-file\") pod \"node-resolver-s76w5\" (UID: \"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\") " pod="openshift-dns/node-resolver-s76w5" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.876439 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bl4st\" (UniqueName: \"kubernetes.io/projected/bb0fa893-f2f4-4864-a154-fb91f3dc76f0-kube-api-access-bl4st\") pod \"node-resolver-s76w5\" (UID: \"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\") " pod="openshift-dns/node-resolver-s76w5" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.876582 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/bb0fa893-f2f4-4864-a154-fb91f3dc76f0-hosts-file\") pod \"node-resolver-s76w5\" (UID: \"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\") " 
pod="openshift-dns/node-resolver-s76w5" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.888458 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:50Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.900758 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bl4st\" (UniqueName: \"kubernetes.io/projected/bb0fa893-f2f4-4864-a154-fb91f3dc76f0-kube-api-access-bl4st\") pod \"node-resolver-s76w5\" (UID: \"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\") " pod="openshift-dns/node-resolver-s76w5" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.912891 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:50Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.965262 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-s76w5" Dec 04 15:02:50 crc kubenswrapper[4946]: I1204 15:02:50.977412 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:02:50 crc kubenswrapper[4946]: E1204 15:02:50.977647 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:02:52.977612562 +0000 UTC m=+23.863656213 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:02:50 crc kubenswrapper[4946]: W1204 15:02:50.977882 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbb0fa893_f2f4_4864_a154_fb91f3dc76f0.slice/crio-8025839b693ca373fa850ba188936a4e7478ea70d901fdb84913641fbfb62dab WatchSource:0}: Error finding container 8025839b693ca373fa850ba188936a4e7478ea70d901fdb84913641fbfb62dab: Status 404 returned error can't find the container with id 8025839b693ca373fa850ba188936a4e7478ea70d901fdb84913641fbfb62dab Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.047667 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-fjmh5"] Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.048087 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.053635 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.053856 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.054376 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.055289 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.056740 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.057446 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-qhv79"] Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.058104 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-fzjk8"] Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.058459 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.061970 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.070444 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.070518 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.070623 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.070936 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.071078 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.071275 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.074362 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.077897 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.077933 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.077956 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.077979 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:02:51 crc kubenswrapper[4946]: E1204 15:02:51.078101 4946 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 15:02:51 crc kubenswrapper[4946]: E1204 15:02:51.078172 4946 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 15:02:53.078154022 +0000 UTC m=+23.964197663 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 15:02:51 crc kubenswrapper[4946]: E1204 15:02:51.078466 4946 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 15:02:51 crc kubenswrapper[4946]: E1204 15:02:51.078499 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 15:02:53.078492542 +0000 UTC m=+23.964536183 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 15:02:51 crc kubenswrapper[4946]: E1204 15:02:51.078565 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 15:02:51 crc kubenswrapper[4946]: E1204 15:02:51.078577 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 15:02:51 crc kubenswrapper[4946]: E1204 15:02:51.078589 4946 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:51 crc kubenswrapper[4946]: E1204 15:02:51.078613 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 15:02:53.078606415 +0000 UTC m=+23.964650056 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:51 crc kubenswrapper[4946]: E1204 15:02:51.078656 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 15:02:51 crc kubenswrapper[4946]: E1204 15:02:51.078666 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 15:02:51 crc kubenswrapper[4946]: E1204 15:02:51.078674 4946 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:51 crc kubenswrapper[4946]: E1204 15:02:51.078707 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 15:02:53.078688647 +0000 UTC m=+23.964732288 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.104816 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.125722 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.154921 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.175927 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.179275 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-etc-kubernetes\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.179316 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dt75h\" (UniqueName: \"kubernetes.io/projected/1f47d6bc-3d05-4c97-902f-5714244b2a1c-kube-api-access-dt75h\") pod \"machine-config-daemon-qhv79\" (UID: \"1f47d6bc-3d05-4c97-902f-5714244b2a1c\") " pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.179338 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/534e3524-8e4b-474e-b14e-2da113cec158-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.179490 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-host-var-lib-cni-bin\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.179577 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1f47d6bc-3d05-4c97-902f-5714244b2a1c-proxy-tls\") pod \"machine-config-daemon-qhv79\" (UID: \"1f47d6bc-3d05-4c97-902f-5714244b2a1c\") " pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.179604 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-cnibin\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.179653 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-multus-cni-dir\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.179675 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-os-release\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.179781 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-multus-socket-dir-parent\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.179809 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/534e3524-8e4b-474e-b14e-2da113cec158-system-cni-dir\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.179836 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/534e3524-8e4b-474e-b14e-2da113cec158-cni-binary-copy\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.179947 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-system-cni-dir\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.179988 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-host-var-lib-cni-multus\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.180030 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-multus-daemon-config\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.180058 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-multus-conf-dir\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.180080 4946 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbs5t\" (UniqueName: \"kubernetes.io/projected/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-kube-api-access-kbs5t\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.180097 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/534e3524-8e4b-474e-b14e-2da113cec158-tuning-conf-dir\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.180143 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-hostroot\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.180170 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/534e3524-8e4b-474e-b14e-2da113cec158-os-release\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.180198 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-host-run-k8s-cni-cncf-io\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.180226 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-cni-binary-copy\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.180243 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-host-var-lib-kubelet\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.180259 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/534e3524-8e4b-474e-b14e-2da113cec158-cnibin\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.180287 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-host-run-netns\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 
15:02:51.180303 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1f47d6bc-3d05-4c97-902f-5714244b2a1c-mcd-auth-proxy-config\") pod \"machine-config-daemon-qhv79\" (UID: \"1f47d6bc-3d05-4c97-902f-5714244b2a1c\") " pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.180324 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7hh2\" (UniqueName: \"kubernetes.io/projected/534e3524-8e4b-474e-b14e-2da113cec158-kube-api-access-c7hh2\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.180356 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-host-run-multus-certs\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.180371 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/1f47d6bc-3d05-4c97-902f-5714244b2a1c-rootfs\") pod \"machine-config-daemon-qhv79\" (UID: \"1f47d6bc-3d05-4c97-902f-5714244b2a1c\") " pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.198478 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.213049 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.218811 4946 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-04 14:57:50 +0000 UTC, rotation deadline is 2026-08-23 18:31:12.325007426 +0000 UTC Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.218848 4946 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6291h28m21.106163227s for next certificate rotation Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.225056 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.238392 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.248095 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.261951 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.275083 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.280711 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-host-run-k8s-cni-cncf-io\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.280744 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/534e3524-8e4b-474e-b14e-2da113cec158-cnibin\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.280764 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-cni-binary-copy\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.280780 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-host-var-lib-kubelet\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.280803 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-host-run-netns\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: 
I1204 15:02:51.280820 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1f47d6bc-3d05-4c97-902f-5714244b2a1c-mcd-auth-proxy-config\") pod \"machine-config-daemon-qhv79\" (UID: \"1f47d6bc-3d05-4c97-902f-5714244b2a1c\") " pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.280837 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7hh2\" (UniqueName: \"kubernetes.io/projected/534e3524-8e4b-474e-b14e-2da113cec158-kube-api-access-c7hh2\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.280862 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-host-run-multus-certs\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.280879 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/1f47d6bc-3d05-4c97-902f-5714244b2a1c-rootfs\") pod \"machine-config-daemon-qhv79\" (UID: \"1f47d6bc-3d05-4c97-902f-5714244b2a1c\") " pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.280906 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-etc-kubernetes\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.280829 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-host-run-k8s-cni-cncf-io\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.280939 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-host-run-multus-certs\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.280915 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-host-var-lib-kubelet\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.280995 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-host-run-netns\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281012 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"cnibin\" (UniqueName: \"kubernetes.io/host-path/534e3524-8e4b-474e-b14e-2da113cec158-cnibin\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281205 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/1f47d6bc-3d05-4c97-902f-5714244b2a1c-rootfs\") pod \"machine-config-daemon-qhv79\" (UID: \"1f47d6bc-3d05-4c97-902f-5714244b2a1c\") " pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281253 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-etc-kubernetes\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281334 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dt75h\" (UniqueName: \"kubernetes.io/projected/1f47d6bc-3d05-4c97-902f-5714244b2a1c-kube-api-access-dt75h\") pod \"machine-config-daemon-qhv79\" (UID: \"1f47d6bc-3d05-4c97-902f-5714244b2a1c\") " pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281354 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/534e3524-8e4b-474e-b14e-2da113cec158-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281375 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-host-var-lib-cni-bin\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281391 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1f47d6bc-3d05-4c97-902f-5714244b2a1c-proxy-tls\") pod \"machine-config-daemon-qhv79\" (UID: \"1f47d6bc-3d05-4c97-902f-5714244b2a1c\") " pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281410 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-cnibin\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281476 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-multus-cni-dir\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281495 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/534e3524-8e4b-474e-b14e-2da113cec158-cni-binary-copy\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281513 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-os-release\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281530 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-multus-socket-dir-parent\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281546 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/534e3524-8e4b-474e-b14e-2da113cec158-system-cni-dir\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281580 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-multus-daemon-config\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281609 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-system-cni-dir\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281629 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-host-var-lib-cni-multus\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281629 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-cni-binary-copy\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281646 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-multus-conf-dir\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281681 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbs5t\" (UniqueName: \"kubernetes.io/projected/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-kube-api-access-kbs5t\") pod \"multus-fjmh5\" (UID: 
\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281698 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/534e3524-8e4b-474e-b14e-2da113cec158-tuning-conf-dir\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281714 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-hostroot\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281732 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/534e3524-8e4b-474e-b14e-2da113cec158-os-release\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281839 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1f47d6bc-3d05-4c97-902f-5714244b2a1c-mcd-auth-proxy-config\") pod \"machine-config-daemon-qhv79\" (UID: \"1f47d6bc-3d05-4c97-902f-5714244b2a1c\") " pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281885 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/534e3524-8e4b-474e-b14e-2da113cec158-os-release\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281916 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-host-var-lib-cni-multus\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281917 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-system-cni-dir\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.281956 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-multus-conf-dir\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.282015 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-os-release\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 
15:02:51.282060 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-multus-socket-dir-parent\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.282092 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/534e3524-8e4b-474e-b14e-2da113cec158-system-cni-dir\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.282180 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/534e3524-8e4b-474e-b14e-2da113cec158-cni-binary-copy\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.282226 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-hostroot\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.282503 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-cnibin\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.282570 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-multus-daemon-config\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.282636 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-host-var-lib-cni-bin\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.282685 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-multus-cni-dir\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.283136 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/534e3524-8e4b-474e-b14e-2da113cec158-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.287540 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/1f47d6bc-3d05-4c97-902f-5714244b2a1c-proxy-tls\") pod \"machine-config-daemon-qhv79\" (UID: \"1f47d6bc-3d05-4c97-902f-5714244b2a1c\") " pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.292567 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc 
kubenswrapper[4946]: I1204 15:02:51.298168 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7hh2\" (UniqueName: \"kubernetes.io/projected/534e3524-8e4b-474e-b14e-2da113cec158-kube-api-access-c7hh2\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.301651 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbs5t\" (UniqueName: \"kubernetes.io/projected/f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09-kube-api-access-kbs5t\") pod \"multus-fjmh5\" (UID: \"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\") " pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.302579 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dt75h\" (UniqueName: \"kubernetes.io/projected/1f47d6bc-3d05-4c97-902f-5714244b2a1c-kube-api-access-dt75h\") pod \"machine-config-daemon-qhv79\" (UID: \"1f47d6bc-3d05-4c97-902f-5714244b2a1c\") " pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.305819 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.320708 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.338347 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.358069 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.370882 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.381234 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-fjmh5" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.387600 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.390735 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.409135 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.431710 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:51Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.456802 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.457699 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.458982 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.459813 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.460992 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.461535 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.462173 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.463226 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.463864 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.464887 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.465485 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" 
path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.466627 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.467161 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.467648 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.468620 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.469202 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.470359 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.470825 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.471547 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.473167 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.474232 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.474700 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.475392 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.475795 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.476452 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" 
path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.476928 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.477422 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-w598m"] Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.481241 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.484288 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.486299 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.486489 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.486583 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.486782 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.486927 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.487002 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.583907 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-var-lib-openvswitch\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.583959 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-ovnkube-script-lib\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.584002 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-run-ovn\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.584024 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-ovnkube-config\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.584160 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-log-socket\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.584185 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-run-ovn-kubernetes\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.584209 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-ovn-node-metrics-cert\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.584229 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-cni-netd\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.584253 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-slash\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.584272 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-etc-openvswitch\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.584290 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-cni-bin\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.584318 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-run-netns\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.584334 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-run-systemd\") 
pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.584353 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-systemd-units\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.584379 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-run-openvswitch\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.584397 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-env-overrides\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.584436 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-kubelet\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.584452 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-node-log\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.584475 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.584502 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fd42\" (UniqueName: \"kubernetes.io/projected/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-kube-api-access-8fd42\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.595403 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-s76w5" event={"ID":"bb0fa893-f2f4-4864-a154-fb91f3dc76f0","Type":"ContainerStarted","Data":"8025839b693ca373fa850ba188936a4e7478ea70d901fdb84913641fbfb62dab"} Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.596656 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-fjmh5" 
event={"ID":"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09","Type":"ContainerStarted","Data":"70bbf5a7c621a7f5a1b013b947179d6da2792dff1cdf5f5175e8b48ca191bab9"} Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686026 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-run-netns\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686076 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-run-systemd\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686104 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-systemd-units\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686156 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-run-openvswitch\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686181 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-env-overrides\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686193 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-run-netns\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686225 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-kubelet\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686202 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-kubelet\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686266 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-run-openvswitch\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686272 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-node-log\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686306 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-run-systemd\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686356 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686297 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-node-log\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686329 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686321 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-systemd-units\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686465 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fd42\" (UniqueName: \"kubernetes.io/projected/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-kube-api-access-8fd42\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686516 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-ovnkube-script-lib\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686553 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-var-lib-openvswitch\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686592 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-run-ovn\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686624 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-ovnkube-config\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686652 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-log-socket\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686674 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-run-ovn-kubernetes\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686702 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-ovn-node-metrics-cert\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686737 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-cni-netd\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686780 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-slash\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686809 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-etc-openvswitch\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686845 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-cni-bin\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 
15:02:51.686898 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-log-socket\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686931 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-cni-bin\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.686943 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-run-ovn-kubernetes\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.687240 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-env-overrides\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.687284 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-etc-openvswitch\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.687296 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-cni-netd\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.687312 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-run-ovn\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.687341 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-var-lib-openvswitch\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.687347 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-slash\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.687627 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-ovnkube-script-lib\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.688093 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-ovnkube-config\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.702232 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-ovn-node-metrics-cert\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.704891 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.706050 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fd42\" (UniqueName: \"kubernetes.io/projected/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-kube-api-access-8fd42\") pod \"ovnkube-node-w598m\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.708818 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.715384 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 04 15:02:51 crc kubenswrapper[4946]: I1204 15:02:51.796322 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.072756 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/534e3524-8e4b-474e-b14e-2da113cec158-tuning-conf-dir\") pod \"multus-additional-cni-plugins-fzjk8\" (UID: \"534e3524-8e4b-474e-b14e-2da113cec158\") " pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.089468 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.104374 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.127426 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.144604 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.158586 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.170415 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.187435 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.203866 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastStat
e\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d74
2fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.219172 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.232197 
4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.244537 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.260391 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.274305 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.291101 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.302385 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.307341 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-
cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.321267 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.345011 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: W1204 15:02:52.355634 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f47d6bc_3d05_4c97_902f_5714244b2a1c.slice/crio-4d7ab459e90eec043319c05aaaff609dd6dcb4032d83f6ae508d547b810a23ea WatchSource:0}: Error finding container 
4d7ab459e90eec043319c05aaaff609dd6dcb4032d83f6ae508d547b810a23ea: Status 404 returned error can't find the container with id 4d7ab459e90eec043319c05aaaff609dd6dcb4032d83f6ae508d547b810a23ea Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.359751 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: W1204 15:02:52.372504 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3537c3df_cdbc_4e1c_aee1_f2d942207a5a.slice/crio-97c226b566a4a0e837159d00d000b318d958a153901272fab7a39757e15e10ec WatchSource:0}: Error finding container 97c226b566a4a0e837159d00d000b318d958a153901272fab7a39757e15e10ec: Status 404 returned error can't find the container with id 97c226b566a4a0e837159d00d000b318d958a153901272fab7a39757e15e10ec Dec 04 15:02:52 crc kubenswrapper[4946]: W1204 15:02:52.380627 4946 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod534e3524_8e4b_474e_b14e_2da113cec158.slice/crio-85847f3af90abfeaa23001ab89b918d2ae061dc4aba5b9ca0bdecc2abf5e5c99 WatchSource:0}: Error finding container 85847f3af90abfeaa23001ab89b918d2ae061dc4aba5b9ca0bdecc2abf5e5c99: Status 404 returned error can't find the container with id 85847f3af90abfeaa23001ab89b918d2ae061dc4aba5b9ca0bdecc2abf5e5c99 Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.386454 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes
\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.399246 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.423156 4946 
status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.445447 4946 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.452137 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.452266 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.452342 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:02:52 crc kubenswrapper[4946]: E1204 15:02:52.452457 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:02:52 crc kubenswrapper[4946]: E1204 15:02:52.452600 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:02:52 crc kubenswrapper[4946]: E1204 15:02:52.452276 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.465380 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.482861 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.500232 4946 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\
\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.603529 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" event={"ID":"534e3524-8e4b-474e-b14e-2da113cec158","Type":"ContainerStarted","Data":"85847f3af90abfeaa23001ab89b918d2ae061dc4aba5b9ca0bdecc2abf5e5c99"} Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.604641 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13"} Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.604668 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"4d7ab459e90eec043319c05aaaff609dd6dcb4032d83f6ae508d547b810a23ea"} Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.605545 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-fjmh5" event={"ID":"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09","Type":"ContainerStarted","Data":"c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f"} Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.608812 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-s76w5" event={"ID":"bb0fa893-f2f4-4864-a154-fb91f3dc76f0","Type":"ContainerStarted","Data":"bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24"} Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.618456 4946 generic.go:334] "Generic (PLEG): container finished" podID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerID="64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7" exitCode=0 Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.618548 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerDied","Data":"64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7"} Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.618616 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerStarted","Data":"97c226b566a4a0e837159d00d000b318d958a153901272fab7a39757e15e10ec"} Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.620557 4946 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.620848 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2"} Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.641470 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers 
with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.693199 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.739875 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.766638 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.790932 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.813212 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":
\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.830039 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.848087 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.862791 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.880174 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.911728 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/v
ar/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.935654 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.956284 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.970269 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.984760 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:52 crc kubenswrapper[4946]: I1204 15:02:52.997598 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:52Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.011790 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.012943 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:02:53 crc kubenswrapper[4946]: E1204 15:02:53.013129 4946 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:02:57.013082949 +0000 UTC m=+27.899126590 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.025015 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\
\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.046797 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z 
is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.058881 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.072357 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.085217 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.098632 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.113757 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.113822 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.113853 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.113876 4946 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:02:53 crc kubenswrapper[4946]: E1204 15:02:53.114078 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 15:02:53 crc kubenswrapper[4946]: E1204 15:02:53.114099 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 15:02:53 crc kubenswrapper[4946]: E1204 15:02:53.114127 4946 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:53 crc kubenswrapper[4946]: E1204 15:02:53.114188 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 15:02:57.114168545 +0000 UTC m=+28.000212186 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:53 crc kubenswrapper[4946]: E1204 15:02:53.114426 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 15:02:53 crc kubenswrapper[4946]: E1204 15:02:53.114446 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 15:02:53 crc kubenswrapper[4946]: E1204 15:02:53.114438 4946 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 15:02:53 crc kubenswrapper[4946]: E1204 15:02:53.114514 4946 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 15:02:53 crc kubenswrapper[4946]: E1204 15:02:53.114562 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 15:02:57.114536225 +0000 UTC m=+28.000580036 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 15:02:53 crc kubenswrapper[4946]: E1204 15:02:53.114458 4946 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:53 crc kubenswrapper[4946]: E1204 15:02:53.114622 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 15:02:57.114588826 +0000 UTC m=+28.000632487 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 15:02:53 crc kubenswrapper[4946]: E1204 15:02:53.114671 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 15:02:57.114653068 +0000 UTC m=+28.000696709 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.115155 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.133357 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.383052 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-jdjs9"] Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.383554 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-jdjs9" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.385635 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.385781 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.386747 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.386885 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.406332 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.423429 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.438464 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.455154 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.467703 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.485948 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.513427 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.518229 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a85cf818-0fdc-4438-8357-38be4a980937-host\") pod \"node-ca-jdjs9\" (UID: \"a85cf818-0fdc-4438-8357-38be4a980937\") " pod="openshift-image-registry/node-ca-jdjs9" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.518291 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9ktl\" (UniqueName: \"kubernetes.io/projected/a85cf818-0fdc-4438-8357-38be4a980937-kube-api-access-w9ktl\") pod \"node-ca-jdjs9\" (UID: \"a85cf818-0fdc-4438-8357-38be4a980937\") " pod="openshift-image-registry/node-ca-jdjs9" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.518322 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a85cf818-0fdc-4438-8357-38be4a980937-serviceca\") pod \"node-ca-jdjs9\" (UID: \"a85cf818-0fdc-4438-8357-38be4a980937\") " pod="openshift-image-registry/node-ca-jdjs9" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.529028 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.549744 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.559442 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.573552 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.586536 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.600278 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.614771 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.619069 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a85cf818-0fdc-4438-8357-38be4a980937-host\") pod \"node-ca-jdjs9\" (UID: \"a85cf818-0fdc-4438-8357-38be4a980937\") " pod="openshift-image-registry/node-ca-jdjs9" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.619106 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9ktl\" (UniqueName: \"kubernetes.io/projected/a85cf818-0fdc-4438-8357-38be4a980937-kube-api-access-w9ktl\") pod \"node-ca-jdjs9\" (UID: \"a85cf818-0fdc-4438-8357-38be4a980937\") " pod="openshift-image-registry/node-ca-jdjs9" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.619145 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a85cf818-0fdc-4438-8357-38be4a980937-serviceca\") pod \"node-ca-jdjs9\" (UID: \"a85cf818-0fdc-4438-8357-38be4a980937\") " pod="openshift-image-registry/node-ca-jdjs9" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.619270 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a85cf818-0fdc-4438-8357-38be4a980937-host\") pod \"node-ca-jdjs9\" (UID: \"a85cf818-0fdc-4438-8357-38be4a980937\") " pod="openshift-image-registry/node-ca-jdjs9" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.620063 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a85cf818-0fdc-4438-8357-38be4a980937-serviceca\") pod \"node-ca-jdjs9\" (UID: \"a85cf818-0fdc-4438-8357-38be4a980937\") " pod="openshift-image-registry/node-ca-jdjs9" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.626869 4946 generic.go:334] "Generic (PLEG): container finished" podID="534e3524-8e4b-474e-b14e-2da113cec158" containerID="16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d" exitCode=0 Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.626931 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" 
event={"ID":"534e3524-8e4b-474e-b14e-2da113cec158","Type":"ContainerDied","Data":"16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d"} Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.628774 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f"} Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.630769 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/0.log" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.631262 4946 generic.go:334] "Generic (PLEG): container finished" podID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerID="bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39" exitCode=1 Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.631692 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerStarted","Data":"a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d"} Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.631725 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerStarted","Data":"691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07"} Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.631765 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerStarted","Data":"e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8"} Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.631780 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerDied","Data":"bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39"} Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.631794 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerStarted","Data":"48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359"} Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.643002 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9ktl\" (UniqueName: \"kubernetes.io/projected/a85cf818-0fdc-4438-8357-38be4a980937-kube-api-access-w9ktl\") pod \"node-ca-jdjs9\" (UID: \"a85cf818-0fdc-4438-8357-38be4a980937\") " pod="openshift-image-registry/node-ca-jdjs9" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.647875 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.670159 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.685874 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.700686 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-jdjs9" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.702005 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.726045 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-di
r\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.743565 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.762065 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.783734 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z 
is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.799932 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.818272 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.832359 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.848504 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.859687 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.875284 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.898540 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.921641 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z 
is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.940268 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:53 crc kubenswrapper[4946]: I1204 15:02:53.975436 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:53Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.018855 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.085443 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.135104 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.162757 4946 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.182774 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.219239 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.257816 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.296512 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.340092 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.380415 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.452090 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.452090 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:02:54 crc kubenswrapper[4946]: E1204 15:02:54.452458 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:02:54 crc kubenswrapper[4946]: E1204 15:02:54.452531 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.452090 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:02:54 crc kubenswrapper[4946]: E1204 15:02:54.452739 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.636936 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-jdjs9" event={"ID":"a85cf818-0fdc-4438-8357-38be4a980937","Type":"ContainerStarted","Data":"de9017e0a9388bae07992f53d96dbe2d937caab931a5bf00ff120f0d05e908a0"} Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.638885 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" event={"ID":"534e3524-8e4b-474e-b14e-2da113cec158","Type":"ContainerStarted","Data":"7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b"} Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.642053 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/0.log" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.643237 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerStarted","Data":"ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531"} Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.665190 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.677480 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.688233 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.701154 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.716045 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.729829 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.744243 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPat
h\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\
\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.757976 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.771835 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.787370 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},
\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] 
MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.817885 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.858276 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.906443 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z 
is after 2025-08-24T17:21:41Z" Dec 04 15:02:54 crc kubenswrapper[4946]: I1204 15:02:54.938841 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.647057 4946 generic.go:334] "Generic (PLEG): container finished" podID="534e3524-8e4b-474e-b14e-2da113cec158" containerID="7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b" exitCode=0 Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.647551 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" event={"ID":"534e3524-8e4b-474e-b14e-2da113cec158","Type":"ContainerDied","Data":"7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b"} Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.656176 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-jdjs9" event={"ID":"a85cf818-0fdc-4438-8357-38be4a980937","Type":"ContainerStarted","Data":"7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90"} Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.670188 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.683209 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\
\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\
\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.698817 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/
crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.713487 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.728763 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.743781 4946 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.744438 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\
\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.748526 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.748579 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.748593 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.748744 4946 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.754514 4946 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.754817 4946 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.756024 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.756053 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.756064 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.756079 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.756090 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:55Z","lastTransitionTime":"2025-12-04T15:02:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.758264 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.775899 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: E1204 15:02:55.779613 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.784396 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.784436 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.784446 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.784463 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.784473 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:55Z","lastTransitionTime":"2025-12-04T15:02:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.798846 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z 
is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.811860 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: E1204 15:02:55.819537 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.823747 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.823775 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.823784 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.823797 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.823808 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:55Z","lastTransitionTime":"2025-12-04T15:02:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.843800 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: E1204 15:02:55.857753 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.861322 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.861347 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.861355 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.861369 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.861380 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:55Z","lastTransitionTime":"2025-12-04T15:02:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.866210 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.882512 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: E1204 15:02:55.883496 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.888288 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.888320 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.888329 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.888346 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.888355 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:55Z","lastTransitionTime":"2025-12-04T15:02:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.896737 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running
\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: E1204 15:02:55.901761 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: E1204 15:02:55.901873 4946 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.905670 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.905844 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.906412 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.906506 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.906605 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:55Z","lastTransitionTime":"2025-12-04T15:02:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.911301 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"nam
e\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.927705 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.945472 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.959266 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.972109 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:55 crc kubenswrapper[4946]: I1204 15:02:55.986232 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.005811 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.009600 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.009644 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.009659 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.009679 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.009704 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:56Z","lastTransitionTime":"2025-12-04T15:02:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.018400 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.030005 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.042862 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.055080 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.064263 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.075880 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.099187 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"moun
tPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.112231 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.112282 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.112296 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.112315 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.112327 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:56Z","lastTransitionTime":"2025-12-04T15:02:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.215004 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.215053 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.215071 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.215097 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.215148 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:56Z","lastTransitionTime":"2025-12-04T15:02:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.318007 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.318069 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.318085 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.318108 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.318143 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:56Z","lastTransitionTime":"2025-12-04T15:02:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.420913 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.421254 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.421269 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.421289 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.421302 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:56Z","lastTransitionTime":"2025-12-04T15:02:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.452629 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.452741 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:02:56 crc kubenswrapper[4946]: E1204 15:02:56.452785 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.452738 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:02:56 crc kubenswrapper[4946]: E1204 15:02:56.452891 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:02:56 crc kubenswrapper[4946]: E1204 15:02:56.453053 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.525943 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.526001 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.526017 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.526039 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.526051 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:56Z","lastTransitionTime":"2025-12-04T15:02:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.628533 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.628576 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.628585 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.628601 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.628610 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:56Z","lastTransitionTime":"2025-12-04T15:02:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.661927 4946 generic.go:334] "Generic (PLEG): container finished" podID="534e3524-8e4b-474e-b14e-2da113cec158" containerID="08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724" exitCode=0 Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.662023 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" event={"ID":"534e3524-8e4b-474e-b14e-2da113cec158","Type":"ContainerDied","Data":"08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724"} Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.670654 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/0.log" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.673570 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerStarted","Data":"94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631"} Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.675730 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.686708 4946 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.699968 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.722469 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.733543 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.733618 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.733634 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.733659 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.733676 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:56Z","lastTransitionTime":"2025-12-04T15:02:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.734542 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.752813 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.774047 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.794102 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mo
untPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.808296 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":
\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is 
complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.823742 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.835920 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.837014 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.837063 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.837076 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.837095 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.837107 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:56Z","lastTransitionTime":"2025-12-04T15:02:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.849075 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.876403 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.896266 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.939217 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.939269 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.939279 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.939296 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:56 crc kubenswrapper[4946]: I1204 15:02:56.939309 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:56Z","lastTransitionTime":"2025-12-04T15:02:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.042741 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.042797 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.042808 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.042828 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.042840 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:57Z","lastTransitionTime":"2025-12-04T15:02:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.059224 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:02:57 crc kubenswrapper[4946]: E1204 15:02:57.059447 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:03:05.059409876 +0000 UTC m=+35.945453527 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.145300 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.145335 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.145346 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.145363 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.145373 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:57Z","lastTransitionTime":"2025-12-04T15:02:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.160187 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.160255 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.160293 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:02:57 crc kubenswrapper[4946]: E1204 15:02:57.160262 4946 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.160356 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:02:57 crc kubenswrapper[4946]: E1204 15:02:57.160408 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 15:02:57 crc kubenswrapper[4946]: E1204 15:02:57.160425 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 15:02:57 crc kubenswrapper[4946]: E1204 15:02:57.160435 4946 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:57 crc kubenswrapper[4946]: E1204 15:02:57.160473 4946 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 15:02:57 crc kubenswrapper[4946]: E1204 15:02:57.160487 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 15:03:05.16043883 +0000 UTC m=+36.046482491 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 15:02:57 crc kubenswrapper[4946]: E1204 15:02:57.160510 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 15:02:57 crc kubenswrapper[4946]: E1204 15:02:57.160516 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 15:03:05.160503822 +0000 UTC m=+36.046547473 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:57 crc kubenswrapper[4946]: E1204 15:02:57.160521 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 15:02:57 crc kubenswrapper[4946]: E1204 15:02:57.160534 4946 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:57 crc kubenswrapper[4946]: E1204 15:02:57.160538 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 15:03:05.160528922 +0000 UTC m=+36.046572573 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 15:02:57 crc kubenswrapper[4946]: E1204 15:02:57.160567 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 15:03:05.160549653 +0000 UTC m=+36.046593294 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.248217 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.248257 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.248268 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.248287 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.248297 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:57Z","lastTransitionTime":"2025-12-04T15:02:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.352668 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.352738 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.352758 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.352788 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.352808 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:57Z","lastTransitionTime":"2025-12-04T15:02:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.456653 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.456733 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.456758 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.456801 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.456829 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:57Z","lastTransitionTime":"2025-12-04T15:02:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.559911 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.559968 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.559983 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.560011 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.560027 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:57Z","lastTransitionTime":"2025-12-04T15:02:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.663092 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.663208 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.663242 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.663278 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.663299 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:57Z","lastTransitionTime":"2025-12-04T15:02:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.682625 4946 generic.go:334] "Generic (PLEG): container finished" podID="534e3524-8e4b-474e-b14e-2da113cec158" containerID="87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a" exitCode=0 Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.682720 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" event={"ID":"534e3524-8e4b-474e-b14e-2da113cec158","Type":"ContainerDied","Data":"87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a"} Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.715704 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:57Z 
is after 2025-08-24T17:21:41Z" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.732808 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:57Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.750635 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:57Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.762540 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:57Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.767008 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.767140 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.767160 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.767179 4946 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.767192 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:57Z","lastTransitionTime":"2025-12-04T15:02:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.779169 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disab
led\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:57Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.791651 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:57Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.805701 4946 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:57Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.819658 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:57Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.835526 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:57Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.853422 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:57Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.869286 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:57Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.870304 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.870333 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.870346 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.870367 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.870379 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:57Z","lastTransitionTime":"2025-12-04T15:02:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.883982 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:57Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.897236 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:57Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.915413 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:57Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.972312 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.972741 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.972753 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.972773 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:57 crc kubenswrapper[4946]: I1204 15:02:57.972787 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:57Z","lastTransitionTime":"2025-12-04T15:02:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.076400 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.076447 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.076460 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.076478 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.076490 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:58Z","lastTransitionTime":"2025-12-04T15:02:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.178724 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.178764 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.178779 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.178795 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.178806 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:58Z","lastTransitionTime":"2025-12-04T15:02:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.281297 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.281357 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.281374 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.281400 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.281418 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:58Z","lastTransitionTime":"2025-12-04T15:02:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.384082 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.384187 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.384209 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.384244 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.384282 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:58Z","lastTransitionTime":"2025-12-04T15:02:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.452005 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.452102 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.452006 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:02:58 crc kubenswrapper[4946]: E1204 15:02:58.452212 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:02:58 crc kubenswrapper[4946]: E1204 15:02:58.452341 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:02:58 crc kubenswrapper[4946]: E1204 15:02:58.452504 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.487214 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.487267 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.487284 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.487304 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.487323 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:58Z","lastTransitionTime":"2025-12-04T15:02:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.589851 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.589909 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.589926 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.589949 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.589961 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:58Z","lastTransitionTime":"2025-12-04T15:02:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.693199 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.693244 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.693254 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.693272 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.693284 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:58Z","lastTransitionTime":"2025-12-04T15:02:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.697002 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" event={"ID":"534e3524-8e4b-474e-b14e-2da113cec158","Type":"ContainerStarted","Data":"e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb"} Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.701201 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/0.log" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.701836 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerStarted","Data":"19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464"} Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.702161 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.702203 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.702261 4946 scope.go:117] "RemoveContainer" containerID="bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.714842 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:58Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.733051 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:58Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.745798 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.749058 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.750265 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:58Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.764986 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:58Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.806526 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:58Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.809454 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.809569 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.809647 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.809717 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.809799 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:58Z","lastTransitionTime":"2025-12-04T15:02:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.822951 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:58Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.837471 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:58Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.855790 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:58Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.875247 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:58Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.891898 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:58Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.907644 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:58Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.918774 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.918830 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.918839 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.918861 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.918871 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:58Z","lastTransitionTime":"2025-12-04T15:02:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.924394 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:58Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.942238 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:58Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.956447 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:58Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.973066 4946 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:58Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:58 crc kubenswrapper[4946]: I1204 15:02:58.987802 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:58Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.002901 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:58Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.017334 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.022318 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.022345 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.022355 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.022370 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.022382 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:59Z","lastTransitionTime":"2025-12-04T15:02:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.043409 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.074474 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ 
northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\
\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ku
becfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.085609 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.098739 
4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.108148 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.118832 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.124540 4946 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.124578 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.124586 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.124603 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.124614 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:59Z","lastTransitionTime":"2025-12-04T15:02:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.132521 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.144621 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.159560 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.176543 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.227662 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.227707 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.227718 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.227744 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.227757 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:59Z","lastTransitionTime":"2025-12-04T15:02:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.298533 4946 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.330548 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.330622 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.330635 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.330656 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.330668 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:59Z","lastTransitionTime":"2025-12-04T15:02:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.434409 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.434464 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.434481 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.434501 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.434513 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:59Z","lastTransitionTime":"2025-12-04T15:02:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.469866 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.484968 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.496256 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.507904 4946 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.521595 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.536943 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.537008 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.537023 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.537045 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.537061 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:59Z","lastTransitionTime":"2025-12-04T15:02:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.537984 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.554751 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02
:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.571075 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.585075 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.601241 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},
\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] 
MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.617416 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.633716 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.639227 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.639271 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.639300 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.639357 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.639371 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:59Z","lastTransitionTime":"2025-12-04T15:02:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.659731 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Dis
abled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ 
start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mo
untPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\
":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.674380 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.715909 4946 generic.go:334] "Generic (PLEG): container finished" podID="534e3524-8e4b-474e-b14e-2da113cec158" containerID="e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb" exitCode=0 Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.715980 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" 
event={"ID":"534e3524-8e4b-474e-b14e-2da113cec158","Type":"ContainerDied","Data":"e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb"} Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.723411 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/0.log" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.724677 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerStarted","Data":"8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b"} Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.724889 4946 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.735559 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.742400 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.742454 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.742467 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.742488 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.742501 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:59Z","lastTransitionTime":"2025-12-04T15:02:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.750448 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.763049 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.776260 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.789839 4946 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.806987 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.822866 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.835861 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.845898 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.845945 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.845955 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.845975 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.845985 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:59Z","lastTransitionTime":"2025-12-04T15:02:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.851221 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.866664 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.880758 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.891511 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.904402 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.923856 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"re
adOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ 
nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursive
ReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",
\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.941630 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\
\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\
\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.949310 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.949352 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.949362 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.949381 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.949392 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:02:59Z","lastTransitionTime":"2025-12-04T15:02:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.953531 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.965776 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.979537 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:02:59 crc kubenswrapper[4946]: I1204 15:02:59.993995 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:02:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.016541 4946 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.038173 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.051164 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.051217 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.051226 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.051242 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.051251 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:00Z","lastTransitionTime":"2025-12-04T15:03:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.061194 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.078931 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.093109 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.107911 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.122331 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.136579 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},
\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] 
MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.150015 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.153880 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.153922 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.153933 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.153954 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.153966 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:00Z","lastTransitionTime":"2025-12-04T15:03:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.256268 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.256317 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.256329 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.256347 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.256359 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:00Z","lastTransitionTime":"2025-12-04T15:03:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.359018 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.359060 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.359069 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.359083 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.359092 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:00Z","lastTransitionTime":"2025-12-04T15:03:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.451972 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.451995 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:00 crc kubenswrapper[4946]: E1204 15:03:00.452197 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.452218 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:00 crc kubenswrapper[4946]: E1204 15:03:00.452302 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:00 crc kubenswrapper[4946]: E1204 15:03:00.452401 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.461096 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.461143 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.461152 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.461165 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.461200 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:00Z","lastTransitionTime":"2025-12-04T15:03:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.564062 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.564099 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.564132 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.564150 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.564163 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:00Z","lastTransitionTime":"2025-12-04T15:03:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.667064 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.667092 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.667101 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.667135 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.667155 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:00Z","lastTransitionTime":"2025-12-04T15:03:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.730217 4946 generic.go:334] "Generic (PLEG): container finished" podID="534e3524-8e4b-474e-b14e-2da113cec158" containerID="c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a" exitCode=0 Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.730269 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" event={"ID":"534e3524-8e4b-474e-b14e-2da113cec158","Type":"ContainerDied","Data":"c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a"} Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.730371 4946 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.745025 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.756673 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.768585 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.769790 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.769821 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.769830 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.769845 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.769854 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:00Z","lastTransitionTime":"2025-12-04T15:03:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.779214 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.790719 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.809642 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.827144 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.844836 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.860743 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.878660 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.878759 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.878775 4946 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.878806 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.878825 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:00Z","lastTransitionTime":"2025-12-04T15:03:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.882417 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 
15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.906557 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.921458 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.943700 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\
\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\
\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.958159 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:00Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.982800 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.982871 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.982888 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.982950 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 
15:03:00 crc kubenswrapper[4946]: I1204 15:03:00.982967 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:00Z","lastTransitionTime":"2025-12-04T15:03:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.086901 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.086974 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.086995 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.087021 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.087039 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:01Z","lastTransitionTime":"2025-12-04T15:03:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.189665 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.189716 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.189729 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.189747 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.189760 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:01Z","lastTransitionTime":"2025-12-04T15:03:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.292111 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.292195 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.292208 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.292233 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.292248 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:01Z","lastTransitionTime":"2025-12-04T15:03:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.395144 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.395501 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.395514 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.395533 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.395549 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:01Z","lastTransitionTime":"2025-12-04T15:03:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.498980 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.499020 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.499028 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.499044 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.499054 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:01Z","lastTransitionTime":"2025-12-04T15:03:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.601426 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.601521 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.601537 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.601564 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.601583 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:01Z","lastTransitionTime":"2025-12-04T15:03:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.704494 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.704538 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.704547 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.704563 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.704574 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:01Z","lastTransitionTime":"2025-12-04T15:03:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.744542 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" event={"ID":"534e3524-8e4b-474e-b14e-2da113cec158","Type":"ContainerStarted","Data":"72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7"} Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.763473 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha2
56:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:01Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.783420 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:01Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.799148 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:01Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.807549 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.807613 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.807657 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.807682 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.807697 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:01Z","lastTransitionTime":"2025-12-04T15:03:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.821935 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:01Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.842739 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:01Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.858740 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:01Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.880651 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ 
sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\
\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:01Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.903220 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:01Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.909821 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.909877 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.909889 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.909910 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.909924 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:01Z","lastTransitionTime":"2025-12-04T15:03:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.921396 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:01Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.938887 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:01Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.954408 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:01Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.967988 4946 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:01Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:01 crc kubenswrapper[4946]: I1204 15:03:01.985960 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:01Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.008154 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:02Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.013083 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.013208 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:02 crc 
kubenswrapper[4946]: I1204 15:03:02.013235 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.013269 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.013292 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:02Z","lastTransitionTime":"2025-12-04T15:03:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.115982 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.116091 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.116111 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.116171 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.116192 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:02Z","lastTransitionTime":"2025-12-04T15:03:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.185256 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.218999 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.219055 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.219074 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.219098 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.219144 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:02Z","lastTransitionTime":"2025-12-04T15:03:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.322648 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.322712 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.322730 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.322755 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.322773 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:02Z","lastTransitionTime":"2025-12-04T15:03:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.425482 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.425553 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.425577 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.425604 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.425622 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:02Z","lastTransitionTime":"2025-12-04T15:03:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.439628 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.452286 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.452350 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.452372 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:02 crc kubenswrapper[4946]: E1204 15:03:02.452522 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 15:03:02 crc kubenswrapper[4946]: E1204 15:03:02.452598 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 04 15:03:02 crc kubenswrapper[4946]: E1204 15:03:02.452742 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.455215 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:02Z is after 2025-08-24T17:21:41Z"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.470400 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:02Z is after 2025-08-24T17:21:41Z"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.489049 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:02Z is after 2025-08-24T17:21:41Z"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.507815 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:02Z is after 2025-08-24T17:21:41Z"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.520616 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:02Z is after 2025-08-24T17:21:41Z"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.528570 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.528644 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.528656 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.528677 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.528690 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:02Z","lastTransitionTime":"2025-12-04T15:03:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.539684 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:02Z is after 2025-08-24T17:21:41Z"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.556653 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:02Z is after 2025-08-24T17:21:41Z"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.574329 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:02Z is after 2025-08-24T17:21:41Z"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.594599 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:02Z is after 2025-08-24T17:21:41Z"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.608507 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:02Z is after 2025-08-24T17:21:41Z"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.625559 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:02Z is after 2025-08-24T17:21:41Z"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.631245 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.631280 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.631290 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.631307 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.631317 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:02Z","lastTransitionTime":"2025-12-04T15:03:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.638881 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:02Z is after 2025-08-24T17:21:41Z"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.656147 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:02Z is after 2025-08-24T17:21:41Z"
Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.677167 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\
\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:02Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.734550 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.734608 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.734625 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.734661 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.734679 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:02Z","lastTransitionTime":"2025-12-04T15:03:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.837360 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.837399 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.837408 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.837429 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.837446 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:02Z","lastTransitionTime":"2025-12-04T15:03:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.940260 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.940336 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.940359 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.940388 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:02 crc kubenswrapper[4946]: I1204 15:03:02.940411 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:02Z","lastTransitionTime":"2025-12-04T15:03:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.043394 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.043467 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.043488 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.043517 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.043539 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:03Z","lastTransitionTime":"2025-12-04T15:03:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.146709 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.146762 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.146780 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.146804 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.146822 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:03Z","lastTransitionTime":"2025-12-04T15:03:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.250056 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.250161 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.250180 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.250205 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.250224 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:03Z","lastTransitionTime":"2025-12-04T15:03:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.352867 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.352934 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.352968 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.352998 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.353018 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:03Z","lastTransitionTime":"2025-12-04T15:03:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.455973 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.456388 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.456639 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.456864 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.457050 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:03Z","lastTransitionTime":"2025-12-04T15:03:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.531244 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs"] Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.532431 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.535481 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.535670 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.555320 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:03Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.560281 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.560316 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.560328 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.560347 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.560359 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:03Z","lastTransitionTime":"2025-12-04T15:03:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.576582 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:03Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.592977 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:03Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.613138 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:03Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.630624 4946 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ebe89772-ac8c-413e-93a0-3e230b3746da-env-overrides\") pod \"ovnkube-control-plane-749d76644c-k58zs\" (UID: \"ebe89772-ac8c-413e-93a0-3e230b3746da\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.630680 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ebe89772-ac8c-413e-93a0-3e230b3746da-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-k58zs\" (UID: \"ebe89772-ac8c-413e-93a0-3e230b3746da\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.630699 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ebe89772-ac8c-413e-93a0-3e230b3746da-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-k58zs\" (UID: \"ebe89772-ac8c-413e-93a0-3e230b3746da\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.630715 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x9kt\" (UniqueName: \"kubernetes.io/projected/ebe89772-ac8c-413e-93a0-3e230b3746da-kube-api-access-8x9kt\") pod \"ovnkube-control-plane-749d76644c-k58zs\" (UID: \"ebe89772-ac8c-413e-93a0-3e230b3746da\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.633264 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:03Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.648347 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:03Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.663476 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.663817 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.664004 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.664168 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.664320 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:03Z","lastTransitionTime":"2025-12-04T15:03:03Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.665059 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:03Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.682305 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:03Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.704562 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:03Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.721757 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:03Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.732249 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ebe89772-ac8c-413e-93a0-3e230b3746da-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-k58zs\" (UID: \"ebe89772-ac8c-413e-93a0-3e230b3746da\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.732549 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ebe89772-ac8c-413e-93a0-3e230b3746da-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-k58zs\" (UID: \"ebe89772-ac8c-413e-93a0-3e230b3746da\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.732733 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x9kt\" (UniqueName: \"kubernetes.io/projected/ebe89772-ac8c-413e-93a0-3e230b3746da-kube-api-access-8x9kt\") pod \"ovnkube-control-plane-749d76644c-k58zs\" (UID: \"ebe89772-ac8c-413e-93a0-3e230b3746da\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.732960 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ebe89772-ac8c-413e-93a0-3e230b3746da-env-overrides\") pod \"ovnkube-control-plane-749d76644c-k58zs\" (UID: \"ebe89772-ac8c-413e-93a0-3e230b3746da\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.733281 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ebe89772-ac8c-413e-93a0-3e230b3746da-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-k58zs\" (UID: \"ebe89772-ac8c-413e-93a0-3e230b3746da\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" Dec 04 15:03:03 crc kubenswrapper[4946]: 
I1204 15:03:03.733806 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ebe89772-ac8c-413e-93a0-3e230b3746da-env-overrides\") pod \"ovnkube-control-plane-749d76644c-k58zs\" (UID: \"ebe89772-ac8c-413e-93a0-3e230b3746da\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.738090 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:03Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.741680 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ebe89772-ac8c-413e-93a0-3e230b3746da-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-k58zs\" (UID: \"ebe89772-ac8c-413e-93a0-3e230b3746da\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.751340 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x9kt\" (UniqueName: \"kubernetes.io/projected/ebe89772-ac8c-413e-93a0-3e230b3746da-kube-api-access-8x9kt\") pod \"ovnkube-control-plane-749d76644c-k58zs\" (UID: \"ebe89772-ac8c-413e-93a0-3e230b3746da\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.757080 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:03Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.767506 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.767579 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.767597 4946 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.767625 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.767648 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:03Z","lastTransitionTime":"2025-12-04T15:03:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.775472 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:03Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.789345 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:03Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.815594 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ 
sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\
\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:03Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.855210 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.871257 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.871319 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.871332 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.871356 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.871370 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:03Z","lastTransitionTime":"2025-12-04T15:03:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:03 crc kubenswrapper[4946]: W1204 15:03:03.872567 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podebe89772_ac8c_413e_93a0_3e230b3746da.slice/crio-a9afc4d92985937a8c1e4a075da60a8ea50511c4d5518a48f015d388922ae216 WatchSource:0}: Error finding container a9afc4d92985937a8c1e4a075da60a8ea50511c4d5518a48f015d388922ae216: Status 404 returned error can't find the container with id a9afc4d92985937a8c1e4a075da60a8ea50511c4d5518a48f015d388922ae216 Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.974361 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.974412 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.974492 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.974525 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:03 crc kubenswrapper[4946]: I1204 15:03:03.974539 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:03Z","lastTransitionTime":"2025-12-04T15:03:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.077788 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.078049 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.078076 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.078097 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.078141 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:04Z","lastTransitionTime":"2025-12-04T15:03:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.181325 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.181419 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.181451 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.181501 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.181527 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:04Z","lastTransitionTime":"2025-12-04T15:03:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.285361 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.285426 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.285447 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.285472 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.285490 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:04Z","lastTransitionTime":"2025-12-04T15:03:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.388935 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.389003 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.389027 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.389052 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.389071 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:04Z","lastTransitionTime":"2025-12-04T15:03:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.451900 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.451974 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.452013 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:04 crc kubenswrapper[4946]: E1204 15:03:04.452080 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:04 crc kubenswrapper[4946]: E1204 15:03:04.452243 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:04 crc kubenswrapper[4946]: E1204 15:03:04.452427 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.492152 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.492228 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.492275 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.492297 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.492308 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:04Z","lastTransitionTime":"2025-12-04T15:03:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.595484 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.595542 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.595556 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.595576 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.595590 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:04Z","lastTransitionTime":"2025-12-04T15:03:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.697875 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.698209 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.698221 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.698237 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.698250 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:04Z","lastTransitionTime":"2025-12-04T15:03:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.755170 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" event={"ID":"ebe89772-ac8c-413e-93a0-3e230b3746da","Type":"ContainerStarted","Data":"e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef"} Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.755237 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" event={"ID":"ebe89772-ac8c-413e-93a0-3e230b3746da","Type":"ContainerStarted","Data":"a9afc4d92985937a8c1e4a075da60a8ea50511c4d5518a48f015d388922ae216"} Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.758205 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovnkube-controller/0.log" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.761204 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/0.log" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.762326 4946 generic.go:334] "Generic (PLEG): container finished" podID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerID="19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464" exitCode=1 Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.762372 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerDied","Data":"19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464"} Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.763204 4946 scope.go:117] "RemoveContainer" containerID="19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.779297 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:04Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.801283 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.801322 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.801331 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.801345 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.801356 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:04Z","lastTransitionTime":"2025-12-04T15:03:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.801532 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:04Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.815273 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:04Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.830392 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:04Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.846673 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:04Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.860873 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:04Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.884966 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ 
sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:04Z\\\",\\\"message\\\":\\\"01.592478 6192 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1204 15:03:01.592521 6192 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1204 15:03:01.592541 6192 reflector.go:311] 
Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 15:03:01.592547 6192 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1204 15:03:01.592594 6192 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1204 15:03:01.592616 6192 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1204 15:03:01.592642 6192 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 15:03:01.592665 6192 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 15:03:01.592509 6192 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 15:03:01.592692 6192 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1204 15:03:01.592395 6192 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 15:03:01.593057 6192 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1204 15:03:01.593107 6192 factory.go:656] Stopping watch factory\\\\nI1204 15:03:01.593153 6192 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:04Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.898137 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:04Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.903858 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.903901 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.903911 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.903929 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.903939 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:04Z","lastTransitionTime":"2025-12-04T15:03:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.910405 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:04Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.920215 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:04Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.933210 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:04Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.945020 4946 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:04Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.958878 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:04Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.975694 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688
df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\
\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:04Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:04 crc kubenswrapper[4946]: I1204 15:03:04.989655 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:04Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.006778 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.006834 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.006846 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.006868 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.006882 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:05Z","lastTransitionTime":"2025-12-04T15:03:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.059539 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:03:05 crc kubenswrapper[4946]: E1204 15:03:05.059598 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:03:21.059574531 +0000 UTC m=+51.945618162 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.107660 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-9xbtr"] Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.108715 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:05 crc kubenswrapper[4946]: E1204 15:03:05.108905 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.109664 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.109724 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.109734 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.109749 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.109777 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:05Z","lastTransitionTime":"2025-12-04T15:03:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.128808 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:05Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.145600 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:05Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.160941 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") 
" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.161468 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.161597 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.161709 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:05 crc kubenswrapper[4946]: E1204 15:03:05.161178 4946 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.160968 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:05Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:05 crc kubenswrapper[4946]: E1204 15:03:05.161559 4946 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 15:03:05 crc kubenswrapper[4946]: E1204 15:03:05.161720 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 15:03:05 crc kubenswrapper[4946]: E1204 15:03:05.162234 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 15:03:05 crc kubenswrapper[4946]: E1204 15:03:05.162246 4946 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 15:03:21.162193989 +0000 UTC m=+52.048237630 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 15:03:05 crc kubenswrapper[4946]: E1204 15:03:05.162254 4946 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:03:05 crc kubenswrapper[4946]: E1204 15:03:05.162313 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 15:03:21.162305792 +0000 UTC m=+52.048349433 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:03:05 crc kubenswrapper[4946]: E1204 15:03:05.161808 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 15:03:05 crc kubenswrapper[4946]: E1204 15:03:05.162388 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 15:03:05 crc kubenswrapper[4946]: E1204 15:03:05.162405 4946 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:03:05 crc kubenswrapper[4946]: E1204 15:03:05.162456 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 15:03:21.162442536 +0000 UTC m=+52.048486177 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 15:03:05 crc kubenswrapper[4946]: E1204 15:03:05.162557 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 15:03:21.162527669 +0000 UTC m=+52.048571320 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.180998 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:05Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.194244 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:05Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.207798 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:05Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.212598 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.212648 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.212666 4946 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.212688 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.212701 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:05Z","lastTransitionTime":"2025-12-04T15:03:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.226154 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:05Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.241513 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:05Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.263041 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ 
sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:04Z\\\",\\\"message\\\":\\\"01.592478 6192 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1204 15:03:01.592521 6192 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1204 15:03:01.592541 6192 reflector.go:311] 
Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 15:03:01.592547 6192 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1204 15:03:01.592594 6192 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1204 15:03:01.592616 6192 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1204 15:03:01.592642 6192 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 15:03:01.592665 6192 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 15:03:01.592509 6192 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 15:03:01.592692 6192 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1204 15:03:01.592395 6192 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 15:03:01.593057 6192 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1204 15:03:01.593107 6192 factory.go:656] Stopping watch factory\\\\nI1204 15:03:01.593153 6192 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:05Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.263338 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6hcl\" (UniqueName: \"kubernetes.io/projected/0a3cccbb-17c2-487d-a952-6b5d50656e2a-kube-api-access-s6hcl\") pod \"network-metrics-daemon-9xbtr\" (UID: \"0a3cccbb-17c2-487d-a952-6b5d50656e2a\") " pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.263405 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs\") pod \"network-metrics-daemon-9xbtr\" (UID: \"0a3cccbb-17c2-487d-a952-6b5d50656e2a\") " pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.275625 4946 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:05Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.287803 4946 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:05Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.300628 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:05Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.315283 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.315334 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.315354 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.315377 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.315394 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:05Z","lastTransitionTime":"2025-12-04T15:03:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.318550 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:05Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.330895 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:05Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.343743 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:05Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.358321 4946 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:05Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.364796 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6hcl\" (UniqueName: \"kubernetes.io/projected/0a3cccbb-17c2-487d-a952-6b5d50656e2a-kube-api-access-s6hcl\") pod \"network-metrics-daemon-9xbtr\" (UID: \"0a3cccbb-17c2-487d-a952-6b5d50656e2a\") " pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.364890 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs\") pod \"network-metrics-daemon-9xbtr\" (UID: \"0a3cccbb-17c2-487d-a952-6b5d50656e2a\") " pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:05 crc kubenswrapper[4946]: E1204 15:03:05.365090 4946 secret.go:188] Couldn't get secret 
openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 15:03:05 crc kubenswrapper[4946]: E1204 15:03:05.365234 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs podName:0a3cccbb-17c2-487d-a952-6b5d50656e2a nodeName:}" failed. No retries permitted until 2025-12-04 15:03:05.865204434 +0000 UTC m=+36.751248075 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs") pod "network-metrics-daemon-9xbtr" (UID: "0a3cccbb-17c2-487d-a952-6b5d50656e2a") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.387520 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6hcl\" (UniqueName: \"kubernetes.io/projected/0a3cccbb-17c2-487d-a952-6b5d50656e2a-kube-api-access-s6hcl\") pod \"network-metrics-daemon-9xbtr\" (UID: \"0a3cccbb-17c2-487d-a952-6b5d50656e2a\") " pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.418660 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.418718 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.418729 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.418745 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.418756 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:05Z","lastTransitionTime":"2025-12-04T15:03:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.521768 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.521835 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.521847 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.521870 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.521884 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:05Z","lastTransitionTime":"2025-12-04T15:03:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.624530 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.624574 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.624583 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.624598 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.624607 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:05Z","lastTransitionTime":"2025-12-04T15:03:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.728896 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.728931 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.728940 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.728953 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.728963 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:05Z","lastTransitionTime":"2025-12-04T15:03:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.769744 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" event={"ID":"ebe89772-ac8c-413e-93a0-3e230b3746da","Type":"ContainerStarted","Data":"2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52"} Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.831589 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.831637 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.831648 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.831667 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.831680 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:05Z","lastTransitionTime":"2025-12-04T15:03:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.869295 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs\") pod \"network-metrics-daemon-9xbtr\" (UID: \"0a3cccbb-17c2-487d-a952-6b5d50656e2a\") " pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:05 crc kubenswrapper[4946]: E1204 15:03:05.869446 4946 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 15:03:05 crc kubenswrapper[4946]: E1204 15:03:05.869518 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs podName:0a3cccbb-17c2-487d-a952-6b5d50656e2a nodeName:}" failed. No retries permitted until 2025-12-04 15:03:06.86950015 +0000 UTC m=+37.755543791 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs") pod "network-metrics-daemon-9xbtr" (UID: "0a3cccbb-17c2-487d-a952-6b5d50656e2a") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.935470 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.935519 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.935530 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.935547 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:05 crc kubenswrapper[4946]: I1204 15:03:05.935558 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:05Z","lastTransitionTime":"2025-12-04T15:03:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.038322 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.038376 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.038386 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.038404 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.038418 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:06Z","lastTransitionTime":"2025-12-04T15:03:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.099653 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.099693 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.099704 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.099721 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.099732 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:06Z","lastTransitionTime":"2025-12-04T15:03:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:06 crc kubenswrapper[4946]: E1204 15:03:06.113783 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.118816 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.118885 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.118899 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.118928 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.118942 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:06Z","lastTransitionTime":"2025-12-04T15:03:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:06 crc kubenswrapper[4946]: E1204 15:03:06.138375 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.143174 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.143204 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.143214 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.143228 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.143238 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:06Z","lastTransitionTime":"2025-12-04T15:03:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:06 crc kubenswrapper[4946]: E1204 15:03:06.159728 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.164845 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.164985 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.165243 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.165473 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.165692 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:06Z","lastTransitionTime":"2025-12-04T15:03:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:06 crc kubenswrapper[4946]: E1204 15:03:06.178744 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.183089 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.183294 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.183391 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.183484 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.183553 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:06Z","lastTransitionTime":"2025-12-04T15:03:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:06 crc kubenswrapper[4946]: E1204 15:03:06.195162 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:06 crc kubenswrapper[4946]: E1204 15:03:06.195546 4946 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.197480 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.197586 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.197668 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.197738 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.197796 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:06Z","lastTransitionTime":"2025-12-04T15:03:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.301524 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.301815 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.301899 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.301982 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.302214 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:06Z","lastTransitionTime":"2025-12-04T15:03:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.405777 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.405822 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.405835 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.405856 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.405869 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:06Z","lastTransitionTime":"2025-12-04T15:03:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.452063 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:06 crc kubenswrapper[4946]: E1204 15:03:06.452231 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.452340 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.452361 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:06 crc kubenswrapper[4946]: E1204 15:03:06.452568 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:06 crc kubenswrapper[4946]: E1204 15:03:06.452648 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.509749 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.509797 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.509806 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.509825 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.509841 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:06Z","lastTransitionTime":"2025-12-04T15:03:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.612530 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.612568 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.612578 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.612590 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.612599 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:06Z","lastTransitionTime":"2025-12-04T15:03:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.715221 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.715271 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.715283 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.715307 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.715321 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:06Z","lastTransitionTime":"2025-12-04T15:03:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.775503 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovnkube-controller/0.log"
Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.777644 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/0.log"
Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.778736 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerStarted","Data":"ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850"}
Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.779483 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w598m"
Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.792502 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z"
Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.806002 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.817707 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.817749 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.817763 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.817780 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.817793 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:06Z","lastTransitionTime":"2025-12-04T15:03:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.830875 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:04Z\\\",\\\"message\\\":\\\"01.592478 6192 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1204 15:03:01.592521 6192 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1204 15:03:01.592541 6192 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 15:03:01.592547 6192 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1204 15:03:01.592594 6192 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1204 15:03:01.592616 6192 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1204 15:03:01.592642 6192 handler.go:208] Removed *v1.Node event handler 
2\\\\nI1204 15:03:01.592665 6192 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 15:03:01.592509 6192 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 15:03:01.592692 6192 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1204 15:03:01.592395 6192 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 15:03:01.593057 6192 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1204 15:03:01.593107 6192 factory.go:656] Stopping watch factory\\\\nI1204 15:03:01.593153 6192 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\
"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.843778 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.855398 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 
15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.867297 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.878757 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs\") pod \"network-metrics-daemon-9xbtr\" (UID: 
\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\") " pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:06 crc kubenswrapper[4946]: E1204 15:03:06.878973 4946 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 15:03:06 crc kubenswrapper[4946]: E1204 15:03:06.879072 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs podName:0a3cccbb-17c2-487d-a952-6b5d50656e2a nodeName:}" failed. No retries permitted until 2025-12-04 15:03:08.879043878 +0000 UTC m=+39.765087519 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs") pod "network-metrics-daemon-9xbtr" (UID: "0a3cccbb-17c2-487d-a952-6b5d50656e2a") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.881880 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.900354 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.912823 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.920859 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.920911 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.920922 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.920943 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.920955 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:06Z","lastTransitionTime":"2025-12-04T15:03:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.928085 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri
-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.943055 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.957074 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kub
ernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\
\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.972433 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:06 crc kubenswrapper[4946]: I1204 15:03:06.986756 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.000354 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.015667 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.024639 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.024692 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.024704 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.024737 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.024749 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:07Z","lastTransitionTime":"2025-12-04T15:03:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.033506 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.049770 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.065845 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/opensh
ift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.081815 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.102321 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:04Z\\\",\\\"message\\\":\\\"01.592478 6192 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1204 15:03:01.592521 6192 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1204 15:03:01.592541 6192 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 15:03:01.592547 6192 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1204 15:03:01.592594 6192 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1204 15:03:01.592616 6192 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1204 15:03:01.592642 6192 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 15:03:01.592665 6192 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 15:03:01.592509 6192 
reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 15:03:01.592692 6192 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1204 15:03:01.592395 6192 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 15:03:01.593057 6192 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1204 15:03:01.593107 6192 factory.go:656] Stopping watch factory\\\\nI1204 15:03:01.593153 6192 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\
\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.124932 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.126841 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.126879 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.126889 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.126905 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.126919 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:07Z","lastTransitionTime":"2025-12-04T15:03:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.160393 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.177608 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.192218 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.203825 4946 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.215728 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 
15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.227335 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.228812 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.228850 4946 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.228860 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.228877 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.228889 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:07Z","lastTransitionTime":"2025-12-04T15:03:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.239016 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.251652 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.261882 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.275821 4946 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.331699 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.331757 4946 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.331775 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.331798 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.331814 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:07Z","lastTransitionTime":"2025-12-04T15:03:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.436084 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.436148 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.436162 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.436181 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.436195 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:07Z","lastTransitionTime":"2025-12-04T15:03:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.452552 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:07 crc kubenswrapper[4946]: E1204 15:03:07.452705 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.539223 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.539268 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.539284 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.539302 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.539315 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:07Z","lastTransitionTime":"2025-12-04T15:03:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.641831 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.641887 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.641901 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.641921 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.641935 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:07Z","lastTransitionTime":"2025-12-04T15:03:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.745749 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.745831 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.745848 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.746289 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.746363 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:07Z","lastTransitionTime":"2025-12-04T15:03:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.783890 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovnkube-controller/1.log" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.784892 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovnkube-controller/0.log" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.786905 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/0.log" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.787762 4946 generic.go:334] "Generic (PLEG): container finished" podID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerID="ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850" exitCode=1 Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.787817 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerDied","Data":"ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850"} Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.787871 4946 scope.go:117] "RemoveContainer" containerID="19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.788445 4946 scope.go:117] "RemoveContainer" containerID="ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850" Dec 04 15:03:07 crc kubenswrapper[4946]: E1204 15:03:07.788730 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.805902 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.821094 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.835724 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.850110 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.850162 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.850171 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.850190 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.850202 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:07Z","lastTransitionTime":"2025-12-04T15:03:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.852688 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.872047 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.894397 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.925088 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19dd3849b62ab7ff4f01bccf69ca9f167c0282168c2e9cb941ea5ccd66b9b464\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:04Z\\\",\\\"message\\\":\\\"01.592478 6192 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1204 15:03:01.592521 6192 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1204 15:03:01.592541 6192 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 15:03:01.592547 6192 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1204 15:03:01.592594 6192 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1204 15:03:01.592616 6192 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1204 15:03:01.592642 6192 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 15:03:01.592665 6192 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 15:03:01.592509 6192 
reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 15:03:01.592692 6192 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1204 15:03:01.592395 6192 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 15:03:01.593057 6192 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1204 15:03:01.593107 6192 factory.go:656] Stopping watch factory\\\\nI1204 15:03:01.593153 6192 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:07Z\\\",\\\"message\\\":\\\"vices.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1204 15:03:06.746734 6418 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z]\\\\nI1204 15:03:06.746752 6418 services_controller.go:360] Finished syncing service machine-config-operator on namespace openshift-machine-config-operator for network=default : 3.63617ms\\\\nI1204 15:03:06.746757 6418 services_controller.go:452] Built 
service\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0
d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.940344 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.952782 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.952848 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.952867 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.952895 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.952917 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:07Z","lastTransitionTime":"2025-12-04T15:03:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.955809 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"moun
tPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.967208 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.984808 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:07 crc kubenswrapper[4946]: I1204 15:03:07.997639 4946 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.012495 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:08Z is after 2025-08-24T17:21:41Z" Dec 04 
15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.026009 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:08Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.041130 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:08Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.056290 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.056324 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.056334 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.056353 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.056365 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:08Z","lastTransitionTime":"2025-12-04T15:03:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.057326 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:08Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.158476 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.158531 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.158543 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.158561 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.158573 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:08Z","lastTransitionTime":"2025-12-04T15:03:08Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.261330 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.261374 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.261386 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.261403 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.261413 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:08Z","lastTransitionTime":"2025-12-04T15:03:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.364201 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.364268 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.364281 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.364306 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.364319 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:08Z","lastTransitionTime":"2025-12-04T15:03:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.452084 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.452126 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.452150 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:08 crc kubenswrapper[4946]: E1204 15:03:08.452909 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:08 crc kubenswrapper[4946]: E1204 15:03:08.453034 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:08 crc kubenswrapper[4946]: E1204 15:03:08.453231 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.467335 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.467395 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.467409 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.467434 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.467450 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:08Z","lastTransitionTime":"2025-12-04T15:03:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.570918 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.570972 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.570986 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.571013 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.571028 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:08Z","lastTransitionTime":"2025-12-04T15:03:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.673568 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.674040 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.674247 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.674456 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.674575 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:08Z","lastTransitionTime":"2025-12-04T15:03:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.777327 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.777699 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.777797 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.777896 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.778173 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:08Z","lastTransitionTime":"2025-12-04T15:03:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.794554 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovnkube-controller/1.log" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.797785 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/0.log" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.799943 4946 scope.go:117] "RemoveContainer" containerID="ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850" Dec 04 15:03:08 crc kubenswrapper[4946]: E1204 15:03:08.800150 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.816921 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:08Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.833485 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:08Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.848532 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:08Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.861766 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:08Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.876405 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:08Z is after 2025-08-24T17:21:41Z" Dec 04 
15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.881893 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.881929 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.881941 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.881959 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.881971 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:08Z","lastTransitionTime":"2025-12-04T15:03:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.894217 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:08Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.898973 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs\") pod \"network-metrics-daemon-9xbtr\" (UID: \"0a3cccbb-17c2-487d-a952-6b5d50656e2a\") " pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:08 crc kubenswrapper[4946]: E1204 15:03:08.899258 4946 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 15:03:08 crc kubenswrapper[4946]: E1204 15:03:08.899377 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs podName:0a3cccbb-17c2-487d-a952-6b5d50656e2a nodeName:}" failed. No retries permitted until 2025-12-04 15:03:12.899343698 +0000 UTC m=+43.785387419 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs") pod "network-metrics-daemon-9xbtr" (UID: "0a3cccbb-17c2-487d-a952-6b5d50656e2a") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.909820 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:08Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.924351 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:08Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.942315 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:08Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.955772 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:08Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.969579 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:08Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.985362 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.985399 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.985412 4946 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.985429 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.985442 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:08Z","lastTransitionTime":"2025-12-04T15:03:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:08 crc kubenswrapper[4946]: I1204 15:03:08.990594 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"im
age\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:08Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.007548 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.022266 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.042748 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:07Z\\\",\\\"message\\\":\\\"vices.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1204 15:03:06.746734 6418 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start 
default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z]\\\\nI1204 15:03:06.746752 6418 services_controller.go:360] Finished syncing service machine-config-operator on namespace openshift-machine-config-operator for network=default : 3.63617ms\\\\nI1204 15:03:06.746757 6418 services_controller.go:452] Built service\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.056191 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.088031 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.088073 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.088083 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.088097 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.088107 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:09Z","lastTransitionTime":"2025-12-04T15:03:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.191312 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.191393 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.191416 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.191450 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.191472 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:09Z","lastTransitionTime":"2025-12-04T15:03:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.294210 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.294293 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.294316 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.294347 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.294372 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:09Z","lastTransitionTime":"2025-12-04T15:03:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.397513 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.397581 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.397595 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.397619 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.397635 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:09Z","lastTransitionTime":"2025-12-04T15:03:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.451906 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:09 crc kubenswrapper[4946]: E1204 15:03:09.452157 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.469830 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.484056 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.500151 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.500214 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.500259 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.500273 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.500295 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.500309 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:09Z","lastTransitionTime":"2025-12-04T15:03:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.512654 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.525477 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\
\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.539203 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.555312 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.574285 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.591035 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.603592 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.603927 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:09 crc 
kubenswrapper[4946]: I1204 15:03:09.604005 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.604095 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.604210 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:09Z","lastTransitionTime":"2025-12-04T15:03:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.614541 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.633862 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.652961 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/opensh
ift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.665474 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.678768 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.701229 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:07Z\\\",\\\"message\\\":\\\"vices.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1204 15:03:06.746734 6418 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start 
default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z]\\\\nI1204 15:03:06.746752 6418 services_controller.go:360] Finished syncing service machine-config-operator on namespace openshift-machine-config-operator for network=default : 3.63617ms\\\\nI1204 15:03:06.746757 6418 services_controller.go:452] Built service\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.707492 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.707603 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.707665 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.707725 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.707790 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:09Z","lastTransitionTime":"2025-12-04T15:03:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.723949 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.809988 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.810045 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.810062 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.810088 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.810106 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:09Z","lastTransitionTime":"2025-12-04T15:03:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.912750 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.912796 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.912808 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.912827 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:09 crc kubenswrapper[4946]: I1204 15:03:09.912841 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:09Z","lastTransitionTime":"2025-12-04T15:03:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.016533 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.016964 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.017048 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.017131 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.017195 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:10Z","lastTransitionTime":"2025-12-04T15:03:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.120494 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.120540 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.120552 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.120573 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.120586 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:10Z","lastTransitionTime":"2025-12-04T15:03:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.223604 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.223671 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.223693 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.223724 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.223741 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:10Z","lastTransitionTime":"2025-12-04T15:03:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.327222 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.327273 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.327291 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.327315 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.327332 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:10Z","lastTransitionTime":"2025-12-04T15:03:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.430494 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.430576 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.430596 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.430627 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.430649 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:10Z","lastTransitionTime":"2025-12-04T15:03:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.452249 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.452336 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.452261 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:10 crc kubenswrapper[4946]: E1204 15:03:10.452488 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:10 crc kubenswrapper[4946]: E1204 15:03:10.452617 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:10 crc kubenswrapper[4946]: E1204 15:03:10.452877 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.534414 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.534462 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.534475 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.534495 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.534508 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:10Z","lastTransitionTime":"2025-12-04T15:03:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.637664 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.637720 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.637740 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.637764 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.637782 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:10Z","lastTransitionTime":"2025-12-04T15:03:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.740577 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.740969 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.741107 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.741288 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.741413 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:10Z","lastTransitionTime":"2025-12-04T15:03:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.843762 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.843817 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.843830 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.843850 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.843864 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:10Z","lastTransitionTime":"2025-12-04T15:03:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.947344 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.947387 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.947400 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.947417 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:10 crc kubenswrapper[4946]: I1204 15:03:10.947428 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:10Z","lastTransitionTime":"2025-12-04T15:03:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.050913 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.050973 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.050982 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.051002 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.051014 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:11Z","lastTransitionTime":"2025-12-04T15:03:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.153960 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.154023 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.154035 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.154055 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.154068 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:11Z","lastTransitionTime":"2025-12-04T15:03:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.256959 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.257037 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.257071 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.257103 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.257169 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:11Z","lastTransitionTime":"2025-12-04T15:03:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.360390 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.360439 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.360453 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.360506 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.360522 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:11Z","lastTransitionTime":"2025-12-04T15:03:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.453413 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:11 crc kubenswrapper[4946]: E1204 15:03:11.453594 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.464160 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.464219 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.464235 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.464259 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.464274 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:11Z","lastTransitionTime":"2025-12-04T15:03:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.567204 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.567259 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.567270 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.567289 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.567300 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:11Z","lastTransitionTime":"2025-12-04T15:03:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.670426 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.670473 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.670482 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.670501 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.670512 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:11Z","lastTransitionTime":"2025-12-04T15:03:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.773576 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.773633 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.773644 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.773667 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.773681 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:11Z","lastTransitionTime":"2025-12-04T15:03:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.877028 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.877082 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.877092 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.877125 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.877136 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:11Z","lastTransitionTime":"2025-12-04T15:03:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.981195 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.981247 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.981263 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.981285 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:11 crc kubenswrapper[4946]: I1204 15:03:11.981298 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:11Z","lastTransitionTime":"2025-12-04T15:03:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:12 crc kubenswrapper[4946]: I1204 15:03:12.084889 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:12 crc kubenswrapper[4946]: I1204 15:03:12.084982 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:12 crc kubenswrapper[4946]: I1204 15:03:12.085006 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:12 crc kubenswrapper[4946]: I1204 15:03:12.085043 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:12 crc kubenswrapper[4946]: I1204 15:03:12.085062 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:12Z","lastTransitionTime":"2025-12-04T15:03:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:12 crc kubenswrapper[4946]: I1204 15:03:12.188983 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:12 crc kubenswrapper[4946]: I1204 15:03:12.189052 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:12 crc kubenswrapper[4946]: I1204 15:03:12.189071 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:12 crc kubenswrapper[4946]: I1204 15:03:12.189099 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:12 crc kubenswrapper[4946]: I1204 15:03:12.189146 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:12Z","lastTransitionTime":"2025-12-04T15:03:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 04 15:03:12 crc kubenswrapper[4946]: I1204 15:03:12.452588 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 15:03:12 crc kubenswrapper[4946]: I1204 15:03:12.452664 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 15:03:12 crc kubenswrapper[4946]: I1204 15:03:12.452713 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 15:03:12 crc kubenswrapper[4946]: E1204 15:03:12.452819 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 04 15:03:12 crc kubenswrapper[4946]: E1204 15:03:12.453018 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 15:03:12 crc kubenswrapper[4946]: E1204 15:03:12.453429 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 15:03:12 crc kubenswrapper[4946]: I1204 15:03:12.947909 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs\") pod \"network-metrics-daemon-9xbtr\" (UID: \"0a3cccbb-17c2-487d-a952-6b5d50656e2a\") " pod="openshift-multus/network-metrics-daemon-9xbtr"
Dec 04 15:03:12 crc kubenswrapper[4946]: E1204 15:03:12.948194 4946 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 04 15:03:12 crc kubenswrapper[4946]: E1204 15:03:12.948284 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs podName:0a3cccbb-17c2-487d-a952-6b5d50656e2a nodeName:}" failed. No retries permitted until 2025-12-04 15:03:20.948259997 +0000 UTC m=+51.834303668 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs") pod "network-metrics-daemon-9xbtr" (UID: "0a3cccbb-17c2-487d-a952-6b5d50656e2a") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 04 15:03:13 crc kubenswrapper[4946]: I1204 15:03:13.013993 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:13 crc kubenswrapper[4946]: I1204 15:03:13.014056 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:13 crc kubenswrapper[4946]: I1204 15:03:13.014065 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:13 crc kubenswrapper[4946]: I1204 15:03:13.014078 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:13 crc kubenswrapper[4946]: I1204 15:03:13.014101 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:13Z","lastTransitionTime":"2025-12-04T15:03:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
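[editor note] The volume manager above refuses retries for 8s (durationBeforeRetry 8s), the doubling backoff the kubelet applies to failed volume operations. A sketch of that arithmetic; the 500ms base and the 2m2s cap are assumptions for illustration, only the 8s value on this attempt is taken from the log line above:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Doubling backoff with a cap; base and cap are assumed, not read from the log.
	delay := 500 * time.Millisecond
	maxDelay := 2*time.Minute + 2*time.Second
	for attempt := 1; attempt <= 9; attempt++ {
		fmt.Printf("attempt %d: no retries permitted for %v\n", attempt, delay)
		if delay*2 > maxDelay {
			delay = maxDelay
		} else {
			delay *= 2
		}
	}
}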
Dec 04 15:03:13 crc kubenswrapper[4946]: I1204 15:03:13.452941 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr"
Dec 04 15:03:13 crc kubenswrapper[4946]: E1204 15:03:13.453202 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a"
Dec 04 15:03:14 crc kubenswrapper[4946]: I1204 15:03:14.052336 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:14 crc kubenswrapper[4946]: I1204 15:03:14.052388 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:14 crc kubenswrapper[4946]: I1204 15:03:14.052408 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:14 crc kubenswrapper[4946]: I1204 15:03:14.052447 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:14 crc kubenswrapper[4946]: I1204 15:03:14.052464 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:14Z","lastTransitionTime":"2025-12-04T15:03:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:14 crc kubenswrapper[4946]: I1204 15:03:14.452615 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 15:03:14 crc kubenswrapper[4946]: I1204 15:03:14.452639 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 15:03:14 crc kubenswrapper[4946]: I1204 15:03:14.452664 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 15:03:14 crc kubenswrapper[4946]: E1204 15:03:14.452836 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 15:03:14 crc kubenswrapper[4946]: E1204 15:03:14.452957 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 04 15:03:14 crc kubenswrapper[4946]: E1204 15:03:14.453080 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 15:03:15 crc kubenswrapper[4946]: I1204 15:03:15.083315 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:15 crc kubenswrapper[4946]: I1204 15:03:15.083380 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:15 crc kubenswrapper[4946]: I1204 15:03:15.083395 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:15 crc kubenswrapper[4946]: I1204 15:03:15.083416 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:15 crc kubenswrapper[4946]: I1204 15:03:15.083435 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:15Z","lastTransitionTime":"2025-12-04T15:03:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:15 crc kubenswrapper[4946]: I1204 15:03:15.452212 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr"
Dec 04 15:03:15 crc kubenswrapper[4946]: E1204 15:03:15.452460 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a"
Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.012553 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.012595 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.012604 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.012637 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.012648 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:16Z","lastTransitionTime":"2025-12-04T15:03:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.422104 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.422165 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.422174 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.422187 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.422195 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:16Z","lastTransitionTime":"2025-12-04T15:03:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 04 15:03:16 crc kubenswrapper[4946]: E1204 15:03:16.434735 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:16Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.439321 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.439409 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.439428 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.439450 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.439467 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:16Z","lastTransitionTime":"2025-12-04T15:03:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.452668 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.452703 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:16 crc kubenswrapper[4946]: E1204 15:03:16.452793 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.452854 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:16 crc kubenswrapper[4946]: E1204 15:03:16.453166 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:16 crc kubenswrapper[4946]: E1204 15:03:16.453032 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:16 crc kubenswrapper[4946]: E1204 15:03:16.455966 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:16Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.460129 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.460160 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.460168 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.460182 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.460190 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:16Z","lastTransitionTime":"2025-12-04T15:03:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:16 crc kubenswrapper[4946]: E1204 15:03:16.473273 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:16Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.477596 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.477645 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.477658 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.477677 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.477689 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:16Z","lastTransitionTime":"2025-12-04T15:03:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:16 crc kubenswrapper[4946]: E1204 15:03:16.491301 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:16Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.497775 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.497811 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.497850 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.497863 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.497872 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:16Z","lastTransitionTime":"2025-12-04T15:03:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:16 crc kubenswrapper[4946]: E1204 15:03:16.512151 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:16Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:16 crc kubenswrapper[4946]: E1204 15:03:16.512336 4946 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.513717 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.513763 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.513773 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.513787 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.513797 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:16Z","lastTransitionTime":"2025-12-04T15:03:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.616774 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.616818 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.616835 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.616856 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.616891 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:16Z","lastTransitionTime":"2025-12-04T15:03:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.719724 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.719772 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.719784 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.719803 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.719815 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:16Z","lastTransitionTime":"2025-12-04T15:03:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.822771 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.822812 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.822823 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.822840 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.822853 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:16Z","lastTransitionTime":"2025-12-04T15:03:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.926079 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.926167 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.926178 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.926195 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:16 crc kubenswrapper[4946]: I1204 15:03:16.926206 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:16Z","lastTransitionTime":"2025-12-04T15:03:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.029463 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.029508 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.029520 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.029545 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.029558 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:17Z","lastTransitionTime":"2025-12-04T15:03:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.133003 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.133049 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.133082 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.133100 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.133125 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:17Z","lastTransitionTime":"2025-12-04T15:03:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.236407 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.236462 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.236477 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.236498 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.236511 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:17Z","lastTransitionTime":"2025-12-04T15:03:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.339354 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.339395 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.339404 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.339424 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.339437 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:17Z","lastTransitionTime":"2025-12-04T15:03:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.442373 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.442448 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.442476 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.442507 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.442529 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:17Z","lastTransitionTime":"2025-12-04T15:03:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.452900 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:17 crc kubenswrapper[4946]: E1204 15:03:17.453168 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.545797 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.545867 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.545882 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.545953 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.545971 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:17Z","lastTransitionTime":"2025-12-04T15:03:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.648861 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.648907 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.648918 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.648940 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.648951 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:17Z","lastTransitionTime":"2025-12-04T15:03:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.751232 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.751278 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.751290 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.751309 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.751320 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:17Z","lastTransitionTime":"2025-12-04T15:03:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.853309 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.853344 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.853354 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.853370 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.853389 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:17Z","lastTransitionTime":"2025-12-04T15:03:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.955997 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.956038 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.956046 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.956065 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:17 crc kubenswrapper[4946]: I1204 15:03:17.956074 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:17Z","lastTransitionTime":"2025-12-04T15:03:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.059335 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.059407 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.059434 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.059709 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.059773 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:18Z","lastTransitionTime":"2025-12-04T15:03:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.161697 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.161808 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.161822 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.161846 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.161862 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:18Z","lastTransitionTime":"2025-12-04T15:03:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.264094 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.264169 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.264181 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.264198 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.264207 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:18Z","lastTransitionTime":"2025-12-04T15:03:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.366754 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.366831 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.366854 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.366884 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.366901 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:18Z","lastTransitionTime":"2025-12-04T15:03:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.451931 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.452007 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:18 crc kubenswrapper[4946]: E1204 15:03:18.452083 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.451939 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:18 crc kubenswrapper[4946]: E1204 15:03:18.452289 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:18 crc kubenswrapper[4946]: E1204 15:03:18.452481 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.469938 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.470337 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.470521 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.470684 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.470831 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:18Z","lastTransitionTime":"2025-12-04T15:03:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.573610 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.573671 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.573683 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.573700 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.573710 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:18Z","lastTransitionTime":"2025-12-04T15:03:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.676166 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.676215 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.676227 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.676249 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.676267 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:18Z","lastTransitionTime":"2025-12-04T15:03:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.778908 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.779234 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.779405 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.779516 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.779604 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:18Z","lastTransitionTime":"2025-12-04T15:03:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.882630 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.882686 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.882697 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.882713 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.882725 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:18Z","lastTransitionTime":"2025-12-04T15:03:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.985038 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.985350 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.985426 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.985498 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:18 crc kubenswrapper[4946]: I1204 15:03:18.985564 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:18Z","lastTransitionTime":"2025-12-04T15:03:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.089554 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.089611 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.089625 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.089650 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.089661 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:19Z","lastTransitionTime":"2025-12-04T15:03:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.192217 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.192269 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.192280 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.192365 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.192381 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:19Z","lastTransitionTime":"2025-12-04T15:03:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.295617 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.295662 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.295678 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.295695 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.295710 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:19Z","lastTransitionTime":"2025-12-04T15:03:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.398439 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.398492 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.398503 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.398521 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.398533 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:19Z","lastTransitionTime":"2025-12-04T15:03:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.452635 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:19 crc kubenswrapper[4946]: E1204 15:03:19.452879 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.474620 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"
name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.491424 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.501156 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.501203 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.501221 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.501248 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.501271 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:19Z","lastTransitionTime":"2025-12-04T15:03:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.504101 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.519842 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.532963 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.547863 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.569265 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ 
sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:07Z\\\",\\\"message\\\":\\\"vices.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1204 15:03:06.746734 6418 ovnkube.go:137] 
failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z]\\\\nI1204 15:03:06.746752 6418 services_controller.go:360] Finished syncing service machine-config-operator on namespace openshift-machine-config-operator for network=default : 3.63617ms\\\\nI1204 15:03:06.746757 6418 services_controller.go:452] Built service\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"
cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.585707 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.599746 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.603206 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.603271 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.603284 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.603305 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.603317 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:19Z","lastTransitionTime":"2025-12-04T15:03:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.612064 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.625850 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.636757 4946 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.649728 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:19Z is after 2025-08-24T17:21:41Z" Dec 04 
15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.666181 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.683322 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.701570 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.706292 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.706322 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:19 crc 
kubenswrapper[4946]: I1204 15:03:19.706331 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.706345 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.706354 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:19Z","lastTransitionTime":"2025-12-04T15:03:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.809150 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.809191 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.809204 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.809221 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.809233 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:19Z","lastTransitionTime":"2025-12-04T15:03:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.911626 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.911667 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.911678 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.911694 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:19 crc kubenswrapper[4946]: I1204 15:03:19.911706 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:19Z","lastTransitionTime":"2025-12-04T15:03:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.013919 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.013969 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.013979 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.014000 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.014013 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:20Z","lastTransitionTime":"2025-12-04T15:03:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.117248 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.117304 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.117324 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.117350 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.117374 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:20Z","lastTransitionTime":"2025-12-04T15:03:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.220389 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.220459 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.220477 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.220502 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.220520 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:20Z","lastTransitionTime":"2025-12-04T15:03:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.323346 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.323380 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.323391 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.323407 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.323419 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:20Z","lastTransitionTime":"2025-12-04T15:03:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.426080 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.426123 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.426132 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.426147 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.426156 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:20Z","lastTransitionTime":"2025-12-04T15:03:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.483814 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.483952 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:20 crc kubenswrapper[4946]: E1204 15:03:20.484094 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.484275 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:20 crc kubenswrapper[4946]: E1204 15:03:20.484319 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:20 crc kubenswrapper[4946]: E1204 15:03:20.484639 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.488526 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.502543 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.505713 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:20Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.518935 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:20Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.529356 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.529497 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.529521 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.529667 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.529714 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:20Z","lastTransitionTime":"2025-12-04T15:03:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.536524 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri
-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:20Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.547495 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:20Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.559155 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/en
v\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:20Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.571468 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:20Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.583967 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:20Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.596292 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:20Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.613247 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:20Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.629678 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:20Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.632419 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.632613 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.632706 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.632798 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.632874 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:20Z","lastTransitionTime":"2025-12-04T15:03:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.644851 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:20Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.660273 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:20Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.674857 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:20Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.690661 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:20Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.711897 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:07Z\\\",\\\"message\\\":\\\"vices.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1204 15:03:06.746734 6418 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start 
default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z]\\\\nI1204 15:03:06.746752 6418 services_controller.go:360] Finished syncing service machine-config-operator on namespace openshift-machine-config-operator for network=default : 3.63617ms\\\\nI1204 15:03:06.746757 6418 services_controller.go:452] Built service\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:20Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.727107 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:20Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.735225 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.735279 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.735292 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.735312 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.735323 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:20Z","lastTransitionTime":"2025-12-04T15:03:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.837609 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.837642 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.837650 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.837663 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.837672 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:20Z","lastTransitionTime":"2025-12-04T15:03:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.940105 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.940153 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.940163 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.940176 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:20 crc kubenswrapper[4946]: I1204 15:03:20.940187 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:20Z","lastTransitionTime":"2025-12-04T15:03:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.037067 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs\") pod \"network-metrics-daemon-9xbtr\" (UID: \"0a3cccbb-17c2-487d-a952-6b5d50656e2a\") " pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:21 crc kubenswrapper[4946]: E1204 15:03:21.037277 4946 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 15:03:21 crc kubenswrapper[4946]: E1204 15:03:21.037355 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs podName:0a3cccbb-17c2-487d-a952-6b5d50656e2a nodeName:}" failed. No retries permitted until 2025-12-04 15:03:37.037334174 +0000 UTC m=+67.923377815 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs") pod "network-metrics-daemon-9xbtr" (UID: "0a3cccbb-17c2-487d-a952-6b5d50656e2a") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.042849 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.042917 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.042936 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.042963 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.042981 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:21Z","lastTransitionTime":"2025-12-04T15:03:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.137905 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:03:21 crc kubenswrapper[4946]: E1204 15:03:21.138097 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:03:53.138081199 +0000 UTC m=+84.024124840 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.145915 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.145957 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.145978 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.145999 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.146013 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:21Z","lastTransitionTime":"2025-12-04T15:03:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.238771 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.238836 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.238884 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.238934 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 15:03:21 crc kubenswrapper[4946]: E1204 15:03:21.239046 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 04 15:03:21 crc kubenswrapper[4946]: E1204 15:03:21.239088 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 04 15:03:21 crc kubenswrapper[4946]: E1204 15:03:21.239102 4946 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 04 15:03:21 crc kubenswrapper[4946]: E1204 15:03:21.239157 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 04 15:03:21 crc kubenswrapper[4946]: E1204 15:03:21.239166 4946 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 04 15:03:21 crc kubenswrapper[4946]: E1204 15:03:21.239191 4946 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 04 15:03:21 crc kubenswrapper[4946]: E1204 15:03:21.239193 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 04 15:03:21 crc kubenswrapper[4946]: E1204 15:03:21.239238 4946 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 04 15:03:21 crc kubenswrapper[4946]: E1204 15:03:21.239200 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 15:03:53.239175765 +0000 UTC m=+84.125219406 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 04 15:03:21 crc kubenswrapper[4946]: E1204 15:03:21.239330 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 15:03:53.239302368 +0000 UTC m=+84.125346019 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 04 15:03:21 crc kubenswrapper[4946]: E1204 15:03:21.239362 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 15:03:53.239348899 +0000 UTC m=+84.125392770 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 04 15:03:21 crc kubenswrapper[4946]: E1204 15:03:21.239392 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 15:03:53.23938361 +0000 UTC m=+84.125427261 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.249036 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.249153 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.249166 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.249205 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.249220 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:21Z","lastTransitionTime":"2025-12-04T15:03:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.353459 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.353567 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.353595 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.353631 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.353656 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:21Z","lastTransitionTime":"2025-12-04T15:03:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.452378 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:21 crc kubenswrapper[4946]: E1204 15:03:21.452620 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.458256 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.458411 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.458433 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.458466 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:21 crc kubenswrapper[4946]: I1204 15:03:21.458489 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:21Z","lastTransitionTime":"2025-12-04T15:03:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:22 crc kubenswrapper[4946]: I1204 15:03:22.452066 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:22 crc kubenswrapper[4946]: I1204 15:03:22.452067 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:22 crc kubenswrapper[4946]: E1204 15:03:22.452418 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:22 crc kubenswrapper[4946]: E1204 15:03:22.452254 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:22 crc kubenswrapper[4946]: I1204 15:03:22.452077 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:22 crc kubenswrapper[4946]: E1204 15:03:22.452614 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:22 crc kubenswrapper[4946]: I1204 15:03:22.492924 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:22 crc kubenswrapper[4946]: I1204 15:03:22.492994 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:22 crc kubenswrapper[4946]: I1204 15:03:22.493008 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:22 crc kubenswrapper[4946]: I1204 15:03:22.493029 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:22 crc kubenswrapper[4946]: I1204 15:03:22.493041 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:22Z","lastTransitionTime":"2025-12-04T15:03:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:23 crc kubenswrapper[4946]: I1204 15:03:23.452261 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:23 crc kubenswrapper[4946]: E1204 15:03:23.452455 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:23 crc kubenswrapper[4946]: I1204 15:03:23.522364 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:23 crc kubenswrapper[4946]: I1204 15:03:23.522686 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:23 crc kubenswrapper[4946]: I1204 15:03:23.522772 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:23 crc kubenswrapper[4946]: I1204 15:03:23.522855 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:23 crc kubenswrapper[4946]: I1204 15:03:23.522982 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:23Z","lastTransitionTime":"2025-12-04T15:03:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:23 crc kubenswrapper[4946]: I1204 15:03:23.626511 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:23 crc kubenswrapper[4946]: I1204 15:03:23.626566 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:23 crc kubenswrapper[4946]: I1204 15:03:23.626580 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:23 crc kubenswrapper[4946]: I1204 15:03:23.626609 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:23 crc kubenswrapper[4946]: I1204 15:03:23.626632 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:23Z","lastTransitionTime":"2025-12-04T15:03:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.351578 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.351619 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.351628 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.351646 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.351657 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:24Z","lastTransitionTime":"2025-12-04T15:03:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.452756 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.452789 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.452780 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:24 crc kubenswrapper[4946]: E1204 15:03:24.452928 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:24 crc kubenswrapper[4946]: E1204 15:03:24.453689 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.454079 4946 scope.go:117] "RemoveContainer" containerID="ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850" Dec 04 15:03:24 crc kubenswrapper[4946]: E1204 15:03:24.454434 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.454675 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.454708 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.454717 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.454733 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.454742 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:24Z","lastTransitionTime":"2025-12-04T15:03:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.558264 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.558774 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.558789 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.558810 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.558827 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:24Z","lastTransitionTime":"2025-12-04T15:03:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.867500 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovnkube-controller/1.log" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.868809 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.868842 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.868853 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.868870 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.868882 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:24Z","lastTransitionTime":"2025-12-04T15:03:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.875017 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/0.log" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.876260 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerStarted","Data":"096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1"} Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.876764 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.895467 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:24Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.919452 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:24Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.931652 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:24Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.945054 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:24Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.957003 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:24Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.970940 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.970973 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.970981 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
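The patch bodies inside these status_manager records are ordinary JSON documents (strategic-merge patches, including the "$setElementOrder/conditions" directive that pins the ordering of the conditions list) that have been quoted once into the err string; the extra backslashes visible in this file come from the journal's own quoting of the log line. A minimal Go sketch for recovering and pretty-printing one of them; the escapedPatch literal is a heavily shortened stand-in for the much longer bodies above, reusing the node-resolver pod's uid from the log.

// Minimal sketch: unquote the embedded patch JSON and indent it for reading.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"strconv"
)

func main() {
	// Shortened stand-in for a real patch body from the records above.
	escapedPatch := `"{\"metadata\":{\"uid\":\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\"},\"status\":{\"$setElementOrder/conditions\":[{\"type\":\"Ready\"}]}}"`
	raw, err := strconv.Unquote(escapedPatch) // undo the one level of Go-style quoting
	if err != nil {
		log.Fatal(err)
	}
	var pretty bytes.Buffer
	if err := json.Indent(&pretty, []byte(raw), "", "  "); err != nil {
		log.Fatal(err)
	}
	fmt.Println(pretty.String())
}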
event="NodeHasSufficientPID" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.970999 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.971009 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:24Z","lastTransitionTime":"2025-12-04T15:03:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.971028 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\
\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:24Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.983536 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:24Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:24 crc kubenswrapper[4946]: I1204 15:03:24.998831 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:24Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.015402 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.035073 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.049813 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.065647 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ea9dbda-9794-4f46-b470-bcd55e5e2dd3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba178e72ed627de57f14e2824e6f3df502d1381b5f3cfa499956cbb491913244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://487b17c1161cd918a231631762628de98ccd97dcfa9bdeeb371e5a7b75ebd541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e555c36c9745da9c2a6c943ed8ed26354d90ca29c4760317dafd74573ffbe32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.074335 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.074403 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.074416 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.074457 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 
15:03:25.074470 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:25Z","lastTransitionTime":"2025-12-04T15:03:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.083992 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
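The "Node became not ready" condition recurring here is independent of the webhook failures: the kubelet keeps the node NotReady with NetworkReady=false until the network plugin (OVN-Kubernetes on this cluster) writes a CNI configuration file into /etc/kubernetes/cni/net.d/. A minimal Go sketch of the check an operator might run while waiting; the directory path is taken straight from the log message.

// Minimal sketch: list the CNI config directory named in the NotReady message.
package main

import (
	"fmt"
	"log"
	"os"
)

func main() {
	const cniConfDir = "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(cniConfDir)
	if err != nil {
		log.Fatalf("cannot read %s: %v", cniConfDir, err)
	}
	if len(entries) == 0 {
		fmt.Println("no CNI configuration file yet: the node stays NotReady")
		return
	}
	for _, e := range entries {
		fmt.Println("found CNI config:", e.Name())
	}
}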
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.102734 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.128815 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.147013 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.168482 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ 
sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:07Z\\\",\\\"message\\\":\\\"vices.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1204 15:03:06.746734 6418 ovnkube.go:137] 
failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z]\\\\nI1204 15:03:06.746752 6418 services_controller.go:360] Finished syncing service machine-config-operator on namespace openshift-machine-config-operator for network=default : 3.63617ms\\\\nI1204 15:03:06.746757 6418 services_controller.go:452] Built service\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.179146 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.179981 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.180026 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.180049 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.180064 4946 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:25Z","lastTransitionTime":"2025-12-04T15:03:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.283290 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.283346 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.283357 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.283378 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.283396 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:25Z","lastTransitionTime":"2025-12-04T15:03:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.386388 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.386433 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.386446 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.386464 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.386478 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:25Z","lastTransitionTime":"2025-12-04T15:03:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.452365 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:25 crc kubenswrapper[4946]: E1204 15:03:25.452554 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.489251 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.489316 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.489329 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.489346 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.489412 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:25Z","lastTransitionTime":"2025-12-04T15:03:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.591841 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.591893 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.591907 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.591925 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.591938 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:25Z","lastTransitionTime":"2025-12-04T15:03:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.695222 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.695312 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.695330 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.695351 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.695366 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:25Z","lastTransitionTime":"2025-12-04T15:03:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.798314 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.798363 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.798379 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.798406 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.798423 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:25Z","lastTransitionTime":"2025-12-04T15:03:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.882772 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovnkube-controller/2.log" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.883937 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovnkube-controller/1.log" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.886792 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/0.log" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.887563 4946 generic.go:334] "Generic (PLEG): container finished" podID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerID="096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1" exitCode=1 Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.887617 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerDied","Data":"096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1"} Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.887674 4946 scope.go:117] "RemoveContainer" containerID="ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.888444 4946 scope.go:117] "RemoveContainer" containerID="096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1" Dec 04 15:03:25 crc kubenswrapper[4946]: E1204 15:03:25.888651 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.901681 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.901727 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.901747 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.901772 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.901791 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:25Z","lastTransitionTime":"2025-12-04T15:03:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.908148 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.924444 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.938612 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.950729 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ea9dbda-9794-4f46-b470-bcd55e5e2dd3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba178e72ed627de57f14e2824e6f3df502d1381b5f3cfa499956cbb491913244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://487b17c1161cd918a231631762628de98ccd97dcfa9bdeeb371e5a7b75ebd541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e555c36c9745da9c2a6c943ed8ed26354d90ca29c4760317dafd74573ffbe32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.962916 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:25 crc kubenswrapper[4946]: I1204 15:03:25.977737 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.003160 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.006451 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.006519 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.006550 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.006572 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.006586 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:26Z","lastTransitionTime":"2025-12-04T15:03:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.039825 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ddd21480f07398e0cd0112286114dea521f73db91c70aa7b6a0f9906fade5850\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:07Z\\\",\\\"message\\\":\\\"vices.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1204 15:03:06.746734 6418 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start 
default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:06Z is after 2025-08-24T17:21:41Z]\\\\nI1204 15:03:06.746752 6418 services_controller.go:360] Finished syncing service machine-config-operator on namespace openshift-machine-config-operator for network=default : 3.63617ms\\\\nI1204 15:03:06.746757 6418 services_controller.go:452] Built service\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:25Z\\\",\\\"message\\\":\\\"LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1204 15:03:25.287620 6635 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z]\\\\nI1204 15:03:25.287647 6635 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of 
type\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d
2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.055399 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\
\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.067509 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.080829 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\"
:\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.095257 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.109305 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.109405 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.109428 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.109459 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.109504 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:26Z","lastTransitionTime":"2025-12-04T15:03:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.110945 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.123856 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.135458 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.148895 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.170502 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.212900 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.212966 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:26 crc 
kubenswrapper[4946]: I1204 15:03:26.212999 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.213028 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.213049 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:26Z","lastTransitionTime":"2025-12-04T15:03:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.315536 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.315613 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.315637 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.315664 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.315682 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:26Z","lastTransitionTime":"2025-12-04T15:03:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.418235 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.418823 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.418865 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.418892 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.418914 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:26Z","lastTransitionTime":"2025-12-04T15:03:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.452216 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.452246 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.452488 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:26 crc kubenswrapper[4946]: E1204 15:03:26.452413 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:26 crc kubenswrapper[4946]: E1204 15:03:26.452702 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:26 crc kubenswrapper[4946]: E1204 15:03:26.452927 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.521914 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.522328 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.522498 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.522714 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.522988 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:26Z","lastTransitionTime":"2025-12-04T15:03:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.549346 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.549583 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.549979 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.550183 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.550322 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:26Z","lastTransitionTime":"2025-12-04T15:03:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:26 crc kubenswrapper[4946]: E1204 15:03:26.585098 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.589997 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.590065 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.590084 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.590113 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.590173 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:26Z","lastTransitionTime":"2025-12-04T15:03:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:26 crc kubenswrapper[4946]: E1204 15:03:26.612094 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.616836 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.616905 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.616922 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.616949 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.616965 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:26Z","lastTransitionTime":"2025-12-04T15:03:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:26 crc kubenswrapper[4946]: E1204 15:03:26.630293 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.634624 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.634676 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.634689 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.634709 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.634723 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:26Z","lastTransitionTime":"2025-12-04T15:03:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:26 crc kubenswrapper[4946]: E1204 15:03:26.650309 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.655934 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.656089 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.656180 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.656260 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.656336 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:26Z","lastTransitionTime":"2025-12-04T15:03:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:26 crc kubenswrapper[4946]: E1204 15:03:26.670967 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: E1204 15:03:26.671133 4946 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.673153 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.673188 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.673223 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.673242 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.673253 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:26Z","lastTransitionTime":"2025-12-04T15:03:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.776644 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.776711 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.776725 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.776748 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.776765 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:26Z","lastTransitionTime":"2025-12-04T15:03:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.880094 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.880207 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.880224 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.880249 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.880264 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:26Z","lastTransitionTime":"2025-12-04T15:03:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.894365 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovnkube-controller/2.log" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.898268 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/0.log" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.900466 4946 scope.go:117] "RemoveContainer" containerID="096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1" Dec 04 15:03:26 crc kubenswrapper[4946]: E1204 15:03:26.900753 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.919663 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.943192 4946 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.971222 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics
-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ 
sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:25Z\\\",\\\"message\\\":\\\"LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, 
Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1204 15:03:25.287620 6635 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z]\\\\nI1204 15:03:25.287647 6635 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.984479 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.984565 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.984594 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.984649 4946 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.984674 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:26Z","lastTransitionTime":"2025-12-04T15:03:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:26 crc kubenswrapper[4946]: I1204 15:03:26.986900 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:26Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.003982 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:27Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.017078 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:27Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.031510 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:27Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.044307 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:27Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.060994 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:27Z is after 2025-08-24T17:21:41Z" Dec 04 
15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.077678 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:27Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.087968 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.088005 4946 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.088013 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.088029 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.088041 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:27Z","lastTransitionTime":"2025-12-04T15:03:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.099884 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:27Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.116601 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:27Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.132638 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:27Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.151356 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:27Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.166377 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ea9dbda-9794-4f46-b470-bcd55e5e2dd3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba178e72ed627de57f14e2824e6f3df502d1381b5f3cfa499956cbb491913244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://487b17c1161cd918a231631762628de98ccd97dcfa9bdeeb371e5a7b75ebd541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e555c36c9745da9c2a6c943ed8ed26354d90ca29c4760317dafd74573ffbe32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:27Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.182501 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:27Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.190820 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.190884 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.190901 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.190984 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.191039 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:27Z","lastTransitionTime":"2025-12-04T15:03:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.197259 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:27Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.293833 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 
15:03:27.293868 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.293876 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.293889 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.293899 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:27Z","lastTransitionTime":"2025-12-04T15:03:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.397085 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.397148 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.397162 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.397180 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.397192 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:27Z","lastTransitionTime":"2025-12-04T15:03:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.452288 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:27 crc kubenswrapper[4946]: E1204 15:03:27.452433 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.500035 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.500100 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.500185 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.500218 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.500253 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:27Z","lastTransitionTime":"2025-12-04T15:03:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.602671 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.602703 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.602713 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.602726 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.602735 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:27Z","lastTransitionTime":"2025-12-04T15:03:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.706244 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.706293 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.706312 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.706336 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.706353 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:27Z","lastTransitionTime":"2025-12-04T15:03:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.809652 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.809709 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.809726 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.809749 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.809766 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:27Z","lastTransitionTime":"2025-12-04T15:03:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.912294 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.912347 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.912358 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.912375 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:27 crc kubenswrapper[4946]: I1204 15:03:27.912389 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:27Z","lastTransitionTime":"2025-12-04T15:03:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.015594 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.015656 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.015679 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.015704 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.015718 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:28Z","lastTransitionTime":"2025-12-04T15:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.119330 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.119394 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.119406 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.119428 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.119440 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:28Z","lastTransitionTime":"2025-12-04T15:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.221993 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.222096 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.222107 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.222136 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.222145 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:28Z","lastTransitionTime":"2025-12-04T15:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.324886 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.324946 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.324956 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.324981 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.324994 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:28Z","lastTransitionTime":"2025-12-04T15:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.428535 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.428599 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.428613 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.428637 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.428666 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:28Z","lastTransitionTime":"2025-12-04T15:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.452151 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.452274 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.452288 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:28 crc kubenswrapper[4946]: E1204 15:03:28.452444 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:28 crc kubenswrapper[4946]: E1204 15:03:28.452582 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:28 crc kubenswrapper[4946]: E1204 15:03:28.452691 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.532239 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.532310 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.532320 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.532342 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.532356 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:28Z","lastTransitionTime":"2025-12-04T15:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.635405 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.635457 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.635467 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.635505 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.635516 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:28Z","lastTransitionTime":"2025-12-04T15:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.738290 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.738340 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.738354 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.738374 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.738387 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:28Z","lastTransitionTime":"2025-12-04T15:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.842342 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.842441 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.842474 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.842512 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.842537 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:28Z","lastTransitionTime":"2025-12-04T15:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.945945 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.946096 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.946254 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.946336 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:28 crc kubenswrapper[4946]: I1204 15:03:28.946373 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:28Z","lastTransitionTime":"2025-12-04T15:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.050082 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.050179 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.050193 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.050218 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.050232 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:29Z","lastTransitionTime":"2025-12-04T15:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.153928 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.153985 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.153997 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.154017 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.154033 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:29Z","lastTransitionTime":"2025-12-04T15:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.256437 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.256508 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.256534 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.256567 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.256591 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:29Z","lastTransitionTime":"2025-12-04T15:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.360284 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.360347 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.360359 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.360384 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.360399 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:29Z","lastTransitionTime":"2025-12-04T15:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.451827 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:29 crc kubenswrapper[4946]: E1204 15:03:29.451951 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.462931 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.462985 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.463026 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.463046 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.463061 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:29Z","lastTransitionTime":"2025-12-04T15:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.469428 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:29Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.487023 4946 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:29Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.501149 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:29Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.515975 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ea9dbda-9794-4f46-b470-bcd55e5e2dd3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba178e72ed627de57f14e2824e6f3df502d1381b5f3cfa499956cbb491913244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://487b17c1161cd918a231631762628de98ccd97dcfa9bdeeb371e5a7b75ebd541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e555c36c9745da9c2a6c943ed8ed26354d90ca29c4760317dafd74573ffbe32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:29Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.530333 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:29Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.547505 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:29Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.562519 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:29Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.565151 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.565329 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.565445 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.565548 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.565631 4946 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:29Z","lastTransitionTime":"2025-12-04T15:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.579067 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:29Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.592357 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:29Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.611220 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ 
sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:25Z\\\",\\\"message\\\":\\\"LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), 
Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1204 15:03:25.287620 6635 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z]\\\\nI1204 15:03:25.287647 6635 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"cont
ainerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:29Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.626030 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:29Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.641260 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:29Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.654017 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:29Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.669208 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.669267 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.669278 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.669303 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.669315 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:29Z","lastTransitionTime":"2025-12-04T15:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.669326 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:29Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.681698 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:29Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.696053 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:29Z is after 2025-08-24T17:21:41Z" Dec 04 
15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.710876 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:29Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.772363 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.772478 4946 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.772490 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.772507 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.772568 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:29Z","lastTransitionTime":"2025-12-04T15:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.876053 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.876149 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.876169 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.876193 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.876210 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:29Z","lastTransitionTime":"2025-12-04T15:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.978984 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.979038 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.979053 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.979072 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:29 crc kubenswrapper[4946]: I1204 15:03:29.979084 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:29Z","lastTransitionTime":"2025-12-04T15:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.082232 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.082268 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.082276 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.082289 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.082298 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:30Z","lastTransitionTime":"2025-12-04T15:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.186236 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.186285 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.186294 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.186319 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.186339 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:30Z","lastTransitionTime":"2025-12-04T15:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.289263 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.289317 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.289329 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.289348 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.289361 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:30Z","lastTransitionTime":"2025-12-04T15:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.392697 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.393221 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.393833 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.393964 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.394199 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:30Z","lastTransitionTime":"2025-12-04T15:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.452630 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:30 crc kubenswrapper[4946]: E1204 15:03:30.452922 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.453465 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:30 crc kubenswrapper[4946]: E1204 15:03:30.453738 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.454003 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:30 crc kubenswrapper[4946]: E1204 15:03:30.454266 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.497823 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.498486 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.498562 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.498587 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.498649 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:30Z","lastTransitionTime":"2025-12-04T15:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.602610 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.602664 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.602682 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.602709 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.602730 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:30Z","lastTransitionTime":"2025-12-04T15:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.705982 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.706053 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.706066 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.706082 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.706143 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:30Z","lastTransitionTime":"2025-12-04T15:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.808705 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.808754 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.808765 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.808782 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.808795 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:30Z","lastTransitionTime":"2025-12-04T15:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.911844 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.911900 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.911910 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.911926 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:30 crc kubenswrapper[4946]: I1204 15:03:30.912208 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:30Z","lastTransitionTime":"2025-12-04T15:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.015635 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.015682 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.015693 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.015713 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.015726 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:31Z","lastTransitionTime":"2025-12-04T15:03:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.117849 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.117886 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.117896 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.117911 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.117920 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:31Z","lastTransitionTime":"2025-12-04T15:03:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.219795 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.219846 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.219859 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.219879 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.219892 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:31Z","lastTransitionTime":"2025-12-04T15:03:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.451971 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr"
Dec 04 15:03:31 crc kubenswrapper[4946]: E1204 15:03:31.452445 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a"
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.538015 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.538702 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.538777 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.538852 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.538915 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:31Z","lastTransitionTime":"2025-12-04T15:03:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.642769 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.642814 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.642828 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.642847 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:31 crc kubenswrapper[4946]: I1204 15:03:31.642859 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:31Z","lastTransitionTime":"2025-12-04T15:03:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.452425 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.452518 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 15:03:32 crc kubenswrapper[4946]: E1204 15:03:32.453064 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 04 15:03:32 crc kubenswrapper[4946]: E1204 15:03:32.452835 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.452577 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 15:03:32 crc kubenswrapper[4946]: E1204 15:03:32.453244 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.467692 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.467756 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.467772 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.467797 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.467812 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:32Z","lastTransitionTime":"2025-12-04T15:03:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.570975 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.571047 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.571063 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.571096 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.571110 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:32Z","lastTransitionTime":"2025-12-04T15:03:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.673285 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.673319 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.673328 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.673341 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.673351 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:32Z","lastTransitionTime":"2025-12-04T15:03:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.775867 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.775906 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.775918 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.775938 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.775950 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:32Z","lastTransitionTime":"2025-12-04T15:03:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.878011 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.878043 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.878056 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.878073 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.878085 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:32Z","lastTransitionTime":"2025-12-04T15:03:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.980409 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.980460 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.980472 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.980487 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:32 crc kubenswrapper[4946]: I1204 15:03:32.980500 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:32Z","lastTransitionTime":"2025-12-04T15:03:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.084221 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.084289 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.084305 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.084347 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.084364 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:33Z","lastTransitionTime":"2025-12-04T15:03:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.187135 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.187181 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.187193 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.187210 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.187222 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:33Z","lastTransitionTime":"2025-12-04T15:03:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.452629 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr"
Dec 04 15:03:33 crc kubenswrapper[4946]: E1204 15:03:33.452821 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a"
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.493877 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.493922 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.493933 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.493951 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.493961 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:33Z","lastTransitionTime":"2025-12-04T15:03:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.596655 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.596710 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.596721 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.596742 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.596755 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:33Z","lastTransitionTime":"2025-12-04T15:03:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.699693 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.699746 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.699761 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.699783 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.699794 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:33Z","lastTransitionTime":"2025-12-04T15:03:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.802670 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.802721 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.802736 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.802756 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.802768 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:33Z","lastTransitionTime":"2025-12-04T15:03:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.905153 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.905207 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.905216 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.905231 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:33 crc kubenswrapper[4946]: I1204 15:03:33.905244 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:33Z","lastTransitionTime":"2025-12-04T15:03:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:34 crc kubenswrapper[4946]: I1204 15:03:34.007847 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:34 crc kubenswrapper[4946]: I1204 15:03:34.007890 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:34 crc kubenswrapper[4946]: I1204 15:03:34.007899 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:34 crc kubenswrapper[4946]: I1204 15:03:34.007914 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:34 crc kubenswrapper[4946]: I1204 15:03:34.007925 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:34Z","lastTransitionTime":"2025-12-04T15:03:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:34 crc kubenswrapper[4946]: I1204 15:03:34.110586 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:34 crc kubenswrapper[4946]: I1204 15:03:34.110634 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:34 crc kubenswrapper[4946]: I1204 15:03:34.110648 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:34 crc kubenswrapper[4946]: I1204 15:03:34.110665 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:34 crc kubenswrapper[4946]: I1204 15:03:34.110676 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:34Z","lastTransitionTime":"2025-12-04T15:03:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:34 crc kubenswrapper[4946]: I1204 15:03:34.212946 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:34 crc kubenswrapper[4946]: I1204 15:03:34.213005 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:34 crc kubenswrapper[4946]: I1204 15:03:34.213020 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:34 crc kubenswrapper[4946]: I1204 15:03:34.213040 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:34 crc kubenswrapper[4946]: I1204 15:03:34.213054 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:34Z","lastTransitionTime":"2025-12-04T15:03:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 04 15:03:34 crc kubenswrapper[4946]: I1204 15:03:34.452569 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 15:03:34 crc kubenswrapper[4946]: E1204 15:03:34.452734 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 15:03:34 crc kubenswrapper[4946]: I1204 15:03:34.452611 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 15:03:34 crc kubenswrapper[4946]: E1204 15:03:34.452810 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 04 15:03:34 crc kubenswrapper[4946]: I1204 15:03:34.452589 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 15:03:34 crc kubenswrapper[4946]: E1204 15:03:34.452863 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 15:03:35 crc kubenswrapper[4946]: I1204 15:03:35.451870 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr"
Dec 04 15:03:35 crc kubenswrapper[4946]: E1204 15:03:35.452032 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a"
Has your network provider started?"} Dec 04 15:03:35 crc kubenswrapper[4946]: I1204 15:03:35.760285 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:35 crc kubenswrapper[4946]: I1204 15:03:35.760343 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:35 crc kubenswrapper[4946]: I1204 15:03:35.760353 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:35 crc kubenswrapper[4946]: I1204 15:03:35.760376 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:35 crc kubenswrapper[4946]: I1204 15:03:35.760389 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:35Z","lastTransitionTime":"2025-12-04T15:03:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:35 crc kubenswrapper[4946]: I1204 15:03:35.863707 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:35 crc kubenswrapper[4946]: I1204 15:03:35.864160 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:35 crc kubenswrapper[4946]: I1204 15:03:35.864528 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:35 crc kubenswrapper[4946]: I1204 15:03:35.864642 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:35 crc kubenswrapper[4946]: I1204 15:03:35.864752 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:35Z","lastTransitionTime":"2025-12-04T15:03:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:35 crc kubenswrapper[4946]: I1204 15:03:35.966788 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:35 crc kubenswrapper[4946]: I1204 15:03:35.966865 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:35 crc kubenswrapper[4946]: I1204 15:03:35.966880 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:35 crc kubenswrapper[4946]: I1204 15:03:35.966901 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:35 crc kubenswrapper[4946]: I1204 15:03:35.966914 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:35Z","lastTransitionTime":"2025-12-04T15:03:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.069090 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.069180 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.069199 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.069227 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.069249 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:36Z","lastTransitionTime":"2025-12-04T15:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.173991 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.174037 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.174050 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.174069 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.174081 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:36Z","lastTransitionTime":"2025-12-04T15:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.276529 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.276600 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.276609 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.276631 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.276644 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:36Z","lastTransitionTime":"2025-12-04T15:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.379756 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.379792 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.379804 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.379820 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.379832 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:36Z","lastTransitionTime":"2025-12-04T15:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.452011 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.452085 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:36 crc kubenswrapper[4946]: E1204 15:03:36.452215 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.452257 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:36 crc kubenswrapper[4946]: E1204 15:03:36.452303 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:36 crc kubenswrapper[4946]: E1204 15:03:36.452423 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.484068 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.484157 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.484175 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.484195 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.484731 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:36Z","lastTransitionTime":"2025-12-04T15:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.587888 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.587934 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.587950 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.587973 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.587991 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:36Z","lastTransitionTime":"2025-12-04T15:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.692150 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.692203 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.692215 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.692231 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.692245 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:36Z","lastTransitionTime":"2025-12-04T15:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.795100 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.795165 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.795174 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.795189 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.795197 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:36Z","lastTransitionTime":"2025-12-04T15:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.897918 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.897956 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.897968 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.897985 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.897994 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:36Z","lastTransitionTime":"2025-12-04T15:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.967011 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.967067 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.967077 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.967092 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.967153 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:36Z","lastTransitionTime":"2025-12-04T15:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:36 crc kubenswrapper[4946]: E1204 15:03:36.983460 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:36Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.990145 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.990190 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.990200 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.990215 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:36 crc kubenswrapper[4946]: I1204 15:03:36.990226 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:36Z","lastTransitionTime":"2025-12-04T15:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:37 crc kubenswrapper[4946]: E1204 15:03:37.003362 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:37Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.007691 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.007720 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.007729 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.007743 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.007752 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:37Z","lastTransitionTime":"2025-12-04T15:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:37 crc kubenswrapper[4946]: E1204 15:03:37.020967 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:37Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.025489 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.025531 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.025545 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.025563 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.025575 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:37Z","lastTransitionTime":"2025-12-04T15:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:37 crc kubenswrapper[4946]: E1204 15:03:37.039224 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:37Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.045241 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.045290 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.045299 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.045317 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.045328 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:37Z","lastTransitionTime":"2025-12-04T15:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.055365 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs\") pod \"network-metrics-daemon-9xbtr\" (UID: \"0a3cccbb-17c2-487d-a952-6b5d50656e2a\") " pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:37 crc kubenswrapper[4946]: E1204 15:03:37.055563 4946 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 15:03:37 crc kubenswrapper[4946]: E1204 15:03:37.055665 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs podName:0a3cccbb-17c2-487d-a952-6b5d50656e2a nodeName:}" failed. No retries permitted until 2025-12-04 15:04:09.055644019 +0000 UTC m=+99.941687660 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs") pod "network-metrics-daemon-9xbtr" (UID: "0a3cccbb-17c2-487d-a952-6b5d50656e2a") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 15:03:37 crc kubenswrapper[4946]: E1204 15:03:37.058510 4946 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.060100 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasSufficientMemory" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.060139 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.060148 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.060163 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.060172 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:37Z","lastTransitionTime":"2025-12-04T15:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.162695 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.162746 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.162759 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.162776 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.162788 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:37Z","lastTransitionTime":"2025-12-04T15:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.265401 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.265447 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.265456 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.265473 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.265483 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:37Z","lastTransitionTime":"2025-12-04T15:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.367690 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.367728 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.367740 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.367757 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.367769 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:37Z","lastTransitionTime":"2025-12-04T15:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.452678 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:37 crc kubenswrapper[4946]: E1204 15:03:37.452945 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.470631 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.470708 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.470722 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.470767 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.470784 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:37Z","lastTransitionTime":"2025-12-04T15:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.573010 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.573044 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.573052 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.573068 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.573076 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:37Z","lastTransitionTime":"2025-12-04T15:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.676132 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.676181 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.676192 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.676212 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.676222 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:37Z","lastTransitionTime":"2025-12-04T15:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.777843 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.777887 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.777897 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.777917 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.777930 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:37Z","lastTransitionTime":"2025-12-04T15:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.880598 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.880628 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.880637 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.880652 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.880661 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:37Z","lastTransitionTime":"2025-12-04T15:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.983369 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.983424 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.983436 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.983454 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:37 crc kubenswrapper[4946]: I1204 15:03:37.983466 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:37Z","lastTransitionTime":"2025-12-04T15:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.085837 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.085879 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.085913 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.085930 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.085940 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:38Z","lastTransitionTime":"2025-12-04T15:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.189022 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.189091 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.189136 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.189167 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.189189 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:38Z","lastTransitionTime":"2025-12-04T15:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.291559 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.291598 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.291607 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.291624 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.291635 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:38Z","lastTransitionTime":"2025-12-04T15:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.394504 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.394540 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.394549 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.394563 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.394573 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:38Z","lastTransitionTime":"2025-12-04T15:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.452377 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.452407 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.452539 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:38 crc kubenswrapper[4946]: E1204 15:03:38.452627 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:38 crc kubenswrapper[4946]: E1204 15:03:38.452735 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:38 crc kubenswrapper[4946]: E1204 15:03:38.452841 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.497076 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.497142 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.497156 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.497174 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.497187 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:38Z","lastTransitionTime":"2025-12-04T15:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.600489 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.600551 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.600562 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.600581 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.600594 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:38Z","lastTransitionTime":"2025-12-04T15:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.703999 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.704059 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.704071 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.704089 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.704101 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:38Z","lastTransitionTime":"2025-12-04T15:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.806717 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.806759 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.806770 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.806786 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.806797 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:38Z","lastTransitionTime":"2025-12-04T15:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.908752 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.908790 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.908801 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.908817 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:38 crc kubenswrapper[4946]: I1204 15:03:38.908828 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:38Z","lastTransitionTime":"2025-12-04T15:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.011619 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.011663 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.011674 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.011692 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.011706 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:39Z","lastTransitionTime":"2025-12-04T15:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.114159 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.114489 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.114536 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.114561 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.114575 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:39Z","lastTransitionTime":"2025-12-04T15:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.216882 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.216938 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.216979 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.218486 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.218549 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:39Z","lastTransitionTime":"2025-12-04T15:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.321381 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.321417 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.321430 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.321446 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.321456 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:39Z","lastTransitionTime":"2025-12-04T15:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.423239 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.423270 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.423280 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.423295 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.423305 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:39Z","lastTransitionTime":"2025-12-04T15:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.454349 4946 scope.go:117] "RemoveContainer" containerID="096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.454446 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:39 crc kubenswrapper[4946]: E1204 15:03:39.454613 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" Dec 04 15:03:39 crc kubenswrapper[4946]: E1204 15:03:39.454745 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.467178 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5
b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:39Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.477872 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:39Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.494626 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:39Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.506354 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:39Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.516647 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:39Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.525438 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.525476 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.525487 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.525502 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.525516 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:39Z","lastTransitionTime":"2025-12-04T15:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.526173 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri
-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:39Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.535400 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:39Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.547542 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:39Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.564350 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:39Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.580692 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:39Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.597809 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:39Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.609881 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ea9dbda-9794-4f46-b470-bcd55e5e2dd3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba178e72ed627de57f14e2824e6f3df502d1381b5f3cfa499956cbb491913244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://487b17c1161cd918a231631762628de98ccd97dcfa9bdeeb371e5a7b75ebd541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e555c36c9745da9c2a6c943ed8ed26354d90ca29c4760317dafd74573ffbe32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:39Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.623561 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:39Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.627639 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.627670 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.627682 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.627697 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.627709 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:39Z","lastTransitionTime":"2025-12-04T15:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.638253 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:39Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.650312 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:39Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.662756 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:39Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.683328 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ 
sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:25Z\\\",\\\"message\\\":\\\"LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), 
Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1204 15:03:25.287620 6635 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z]\\\\nI1204 15:03:25.287647 6635 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"cont
ainerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:39Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.730020 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.730059 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.730070 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.730088 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.730100 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:39Z","lastTransitionTime":"2025-12-04T15:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.831710 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.831774 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.831783 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.831796 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.831804 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:39Z","lastTransitionTime":"2025-12-04T15:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.934692 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.934752 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.934770 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.934795 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:39 crc kubenswrapper[4946]: I1204 15:03:39.934815 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:39Z","lastTransitionTime":"2025-12-04T15:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.037434 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.037488 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.037496 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.037513 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.037523 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:40Z","lastTransitionTime":"2025-12-04T15:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.139453 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.139496 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.139509 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.139526 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.139537 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:40Z","lastTransitionTime":"2025-12-04T15:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.241965 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.242004 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.242014 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.242030 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.242041 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:40Z","lastTransitionTime":"2025-12-04T15:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.344518 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.344555 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.344563 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.344576 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.344587 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:40Z","lastTransitionTime":"2025-12-04T15:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.447356 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.447405 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.447416 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.447435 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.447446 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:40Z","lastTransitionTime":"2025-12-04T15:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.452659 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.452704 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:40 crc kubenswrapper[4946]: E1204 15:03:40.452772 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.452889 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:40 crc kubenswrapper[4946]: E1204 15:03:40.452950 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:40 crc kubenswrapper[4946]: E1204 15:03:40.453064 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.550346 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.550393 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.550403 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.550421 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.550434 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:40Z","lastTransitionTime":"2025-12-04T15:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.653221 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.653264 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.653273 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.653289 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.653298 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:40Z","lastTransitionTime":"2025-12-04T15:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.755813 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.755880 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.755894 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.755919 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.755935 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:40Z","lastTransitionTime":"2025-12-04T15:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.858677 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.858709 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.858720 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.858736 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.858747 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:40Z","lastTransitionTime":"2025-12-04T15:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.961825 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.961903 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.961913 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.961956 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.961969 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:40Z","lastTransitionTime":"2025-12-04T15:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.978526 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-fjmh5_f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09/kube-multus/0.log" Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.978582 4946 generic.go:334] "Generic (PLEG): container finished" podID="f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09" containerID="c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f" exitCode=1 Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.978615 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-fjmh5" event={"ID":"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09","Type":"ContainerDied","Data":"c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f"} Dec 04 15:03:40 crc kubenswrapper[4946]: I1204 15:03:40.979184 4946 scope.go:117] "RemoveContainer" containerID="c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.004655 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:25Z\\\",\\\"message\\\":\\\"LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1204 15:03:25.287620 6635 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 
0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z]\\\\nI1204 15:03:25.287647 6635 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"
sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:41Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.016006 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:41Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.029599 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:40Z\\\",\\\"message\\\":\\\"2025-12-04T15:02:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_96487a8a-6292-44e9-b01b-0c12615a547c\\\\n2025-12-04T15:02:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_96487a8a-6292-44e9-b01b-0c12615a547c to /host/opt/cni/bin/\\\\n2025-12-04T15:02:55Z [verbose] multus-daemon started\\\\n2025-12-04T15:02:55Z [verbose] Readiness Indicator file check\\\\n2025-12-04T15:03:40Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:41Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.041532 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:41Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.053099 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:41Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.065083 4946 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.065192 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.065207 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.065250 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.065266 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:41Z","lastTransitionTime":"2025-12-04T15:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.067347 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:41Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.079928 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:41Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.091844 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:41Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.106614 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:41Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.121762 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:41Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.136049 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:41Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.153232 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"c
ontainerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:41Z is after 2025-08-24T17:21:41Z"
Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.167276 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.167516 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.167710 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.167812 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.167901 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:41Z","lastTransitionTime":"2025-12-04T15:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.169810 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted.
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:41Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.183691 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:41Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.201805 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/opensh
ift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:41Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.216091 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:41Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.229008 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ea9dbda-9794-4f46-b470-bcd55e5e2dd3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba178e72ed627de57f14e2824e6f3df502d1381b5f3cfa499956cbb491913244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://487b17c1161cd918a231631762628de98ccd97dcfa9bdeeb371e5a7b75ebd541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e555c36c9745da9c2a6c943ed8ed26354d90ca29c4760317dafd74573ffbe32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:41Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.271140 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.271616 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.271681 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.271748 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 
15:03:41.271806 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:41Z","lastTransitionTime":"2025-12-04T15:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.374977 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.375019 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.375032 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.375047 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.375058 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:41Z","lastTransitionTime":"2025-12-04T15:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.452378 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:41 crc kubenswrapper[4946]: E1204 15:03:41.452694 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.482993 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.483557 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.483574 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.483592 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.483605 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:41Z","lastTransitionTime":"2025-12-04T15:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.586908 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.587244 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.587318 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.587397 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.587481 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:41Z","lastTransitionTime":"2025-12-04T15:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.689386 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.689421 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.689431 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.689445 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.689454 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:41Z","lastTransitionTime":"2025-12-04T15:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.791645 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.791683 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.791694 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.791709 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.791720 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:41Z","lastTransitionTime":"2025-12-04T15:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.894690 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.894733 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.894748 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.894770 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.894785 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:41Z","lastTransitionTime":"2025-12-04T15:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.983091 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-fjmh5_f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09/kube-multus/0.log" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.983346 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-fjmh5" event={"ID":"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09","Type":"ContainerStarted","Data":"482f45a4b06addcfe6d528f3e9cb8e4d27938059721fc7a063bb0deb4dd29d96"} Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.995380 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:41Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.996892 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.996926 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.996937 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.996953 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:41 crc kubenswrapper[4946]: I1204 15:03:41.996963 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:41Z","lastTransitionTime":"2025-12-04T15:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.007205 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:42Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.020861 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:42Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.034906 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:42Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.047978 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:42Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.058231 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:42Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:42 crc 
kubenswrapper[4946]: I1204 15:03:42.071478 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:42Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.086319 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:42Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.100109 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.100180 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.100192 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.100211 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.100224 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:42Z","lastTransitionTime":"2025-12-04T15:03:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.102293 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12
-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:42Z is after 2025-08-24T17:21:41Z" Dec 04 
15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.118863 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:42Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.135366 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:42Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.147860 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ea9dbda-9794-4f46-b470-bcd55e5e2dd3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba178e72ed627de57f14e2824e6f3df502d1381b5f3cfa499956cbb491913244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://487b17c1161cd918a231631762628de98ccd97dcfa9bdeeb371e5a7b75ebd541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e555c36c9745da9c2a6c943ed8ed26354d90ca29c4760317dafd74573ffbe32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:42Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.160938 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:42Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.172286 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:42Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.182646 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:42Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.194340 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://482f45a4b06addcfe6d528f3e9cb8e4d27938059721fc7a063bb0deb4dd29d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:40Z\\\",\\\"message\\\":\\\"2025-12-04T15:02:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_96487a8a-6292-44e9-b01b-0c12615a547c\\\\n2025-12-04T15:02:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_96487a8a-6292-44e9-b01b-0c12615a547c to /host/opt/cni/bin/\\\\n2025-12-04T15:02:55Z [verbose] multus-daemon started\\\\n2025-12-04T15:02:55Z [verbose] Readiness Indicator file check\\\\n2025-12-04T15:03:40Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:42Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.202973 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.203019 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.203029 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.203046 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.203055 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:42Z","lastTransitionTime":"2025-12-04T15:03:42Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.213337 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ 
sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:25Z\\\",\\\"message\\\":\\\"LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, 
Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1204 15:03:25.287620 6635 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z]\\\\nI1204 15:03:25.287647 6635 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6
f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:42Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.305296 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.305342 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.305351 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.305365 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:42 crc 
kubenswrapper[4946]: I1204 15:03:42.305376 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:42Z","lastTransitionTime":"2025-12-04T15:03:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.408583 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.408622 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.408636 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.408652 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.408663 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:42Z","lastTransitionTime":"2025-12-04T15:03:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.452014 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.452147 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:42 crc kubenswrapper[4946]: E1204 15:03:42.452259 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.452405 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:42 crc kubenswrapper[4946]: E1204 15:03:42.452559 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:42 crc kubenswrapper[4946]: E1204 15:03:42.452630 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.511856 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.511901 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.511910 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.511929 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.511938 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:42Z","lastTransitionTime":"2025-12-04T15:03:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.614271 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.614312 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.614322 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.614336 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.614345 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:42Z","lastTransitionTime":"2025-12-04T15:03:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.716570 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.716630 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.716644 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.716666 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.716684 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:42Z","lastTransitionTime":"2025-12-04T15:03:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.819033 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.819064 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.819073 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.819088 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.819097 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:42Z","lastTransitionTime":"2025-12-04T15:03:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.928881 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.928950 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.928977 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.929006 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:42 crc kubenswrapper[4946]: I1204 15:03:42.929022 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:42Z","lastTransitionTime":"2025-12-04T15:03:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.032197 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.032264 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.032277 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.032298 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.032311 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:43Z","lastTransitionTime":"2025-12-04T15:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.135107 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.135178 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.135194 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.135215 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.135230 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:43Z","lastTransitionTime":"2025-12-04T15:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.237692 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.237735 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.237744 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.237760 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.237776 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:43Z","lastTransitionTime":"2025-12-04T15:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.340688 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.340725 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.340735 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.340749 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.340761 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:43Z","lastTransitionTime":"2025-12-04T15:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.444447 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.444506 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.444517 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.444538 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.444550 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:43Z","lastTransitionTime":"2025-12-04T15:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.451995 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:43 crc kubenswrapper[4946]: E1204 15:03:43.452277 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.547793 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.547836 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.547845 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.547862 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.547872 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:43Z","lastTransitionTime":"2025-12-04T15:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.650009 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.650062 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.650075 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.650091 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.650102 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:43Z","lastTransitionTime":"2025-12-04T15:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.752839 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.752880 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.752890 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.752902 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.752912 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:43Z","lastTransitionTime":"2025-12-04T15:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.855875 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.855912 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.855924 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.855940 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.855950 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:43Z","lastTransitionTime":"2025-12-04T15:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.958457 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.958501 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.958509 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.958525 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:43 crc kubenswrapper[4946]: I1204 15:03:43.958533 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:43Z","lastTransitionTime":"2025-12-04T15:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.062013 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.062096 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.062162 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.062197 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.062221 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:44Z","lastTransitionTime":"2025-12-04T15:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.165029 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.165272 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.165282 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.165304 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.165314 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:44Z","lastTransitionTime":"2025-12-04T15:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.268102 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.268154 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.268166 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.268184 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.268196 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:44Z","lastTransitionTime":"2025-12-04T15:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.370899 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.370986 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.371007 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.371034 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.371053 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:44Z","lastTransitionTime":"2025-12-04T15:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.452642 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.452672 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.452744 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:44 crc kubenswrapper[4946]: E1204 15:03:44.452791 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:44 crc kubenswrapper[4946]: E1204 15:03:44.452904 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:44 crc kubenswrapper[4946]: E1204 15:03:44.453001 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.473835 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.473881 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.473892 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.473910 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.473923 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:44Z","lastTransitionTime":"2025-12-04T15:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.576696 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.576734 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.576744 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.576766 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.576784 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:44Z","lastTransitionTime":"2025-12-04T15:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.679252 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.679352 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.679371 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.679389 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.679401 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:44Z","lastTransitionTime":"2025-12-04T15:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.782596 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.783015 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.783081 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.783187 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.783351 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:44Z","lastTransitionTime":"2025-12-04T15:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.886415 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.886460 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.886469 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.886485 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.886496 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:44Z","lastTransitionTime":"2025-12-04T15:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.988835 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.989819 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.989959 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.990251 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:44 crc kubenswrapper[4946]: I1204 15:03:44.990623 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:44Z","lastTransitionTime":"2025-12-04T15:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.094047 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.094214 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.094231 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.094255 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.094271 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:45Z","lastTransitionTime":"2025-12-04T15:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.197762 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.197833 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.197855 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.197882 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.197906 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:45Z","lastTransitionTime":"2025-12-04T15:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.300792 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.300859 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.300877 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.300899 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.300919 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:45Z","lastTransitionTime":"2025-12-04T15:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.403907 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.404310 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.404566 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.404782 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.404978 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:45Z","lastTransitionTime":"2025-12-04T15:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.452502 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:45 crc kubenswrapper[4946]: E1204 15:03:45.452647 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.508507 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.508548 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.508558 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.508572 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.508582 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:45Z","lastTransitionTime":"2025-12-04T15:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.611653 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.611695 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.611704 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.611719 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.611728 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:45Z","lastTransitionTime":"2025-12-04T15:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.714230 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.714267 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.714279 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.714294 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.714307 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:45Z","lastTransitionTime":"2025-12-04T15:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.817191 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.817231 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.817241 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.817256 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.817267 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:45Z","lastTransitionTime":"2025-12-04T15:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.919931 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.919974 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.919983 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.919998 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:45 crc kubenswrapper[4946]: I1204 15:03:45.920008 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:45Z","lastTransitionTime":"2025-12-04T15:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.021734 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.021765 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.021775 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.021788 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.021797 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:46Z","lastTransitionTime":"2025-12-04T15:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.124549 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.124609 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.124625 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.124645 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.124660 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:46Z","lastTransitionTime":"2025-12-04T15:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.228231 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.228283 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.228301 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.228321 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.228388 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:46Z","lastTransitionTime":"2025-12-04T15:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.333199 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.333274 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.333295 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.333323 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.333344 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:46Z","lastTransitionTime":"2025-12-04T15:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.435664 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.435705 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.435713 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.435730 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.435740 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:46Z","lastTransitionTime":"2025-12-04T15:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.451693 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:46 crc kubenswrapper[4946]: E1204 15:03:46.451961 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.451802 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:46 crc kubenswrapper[4946]: E1204 15:03:46.452182 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.451763 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:46 crc kubenswrapper[4946]: E1204 15:03:46.452603 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.539409 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.539520 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.539591 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.539626 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.539649 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:46Z","lastTransitionTime":"2025-12-04T15:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.642137 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.642184 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.642217 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.642470 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.642489 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:46Z","lastTransitionTime":"2025-12-04T15:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.744528 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.744559 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.744567 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.744580 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.744588 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:46Z","lastTransitionTime":"2025-12-04T15:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.846480 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.846544 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.846555 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.846572 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.846959 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:46Z","lastTransitionTime":"2025-12-04T15:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.949404 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.949471 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.949492 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.949520 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:46 crc kubenswrapper[4946]: I1204 15:03:46.949537 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:46Z","lastTransitionTime":"2025-12-04T15:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.052288 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.052341 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.052355 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.052374 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.052387 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:47Z","lastTransitionTime":"2025-12-04T15:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.154717 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.154763 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.154777 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.154796 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.154809 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:47Z","lastTransitionTime":"2025-12-04T15:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.258272 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.258323 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.258336 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.258358 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.258373 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:47Z","lastTransitionTime":"2025-12-04T15:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.360519 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.360563 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.360570 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.360585 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.360594 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:47Z","lastTransitionTime":"2025-12-04T15:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.370417 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.370469 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.370484 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.370508 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.370522 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:47Z","lastTransitionTime":"2025-12-04T15:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:47 crc kubenswrapper[4946]: E1204 15:03:47.386000 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:47Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.390938 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.390997 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.391020 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.391049 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.391071 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:47Z","lastTransitionTime":"2025-12-04T15:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:47 crc kubenswrapper[4946]: E1204 15:03:47.411044 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:47Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.416465 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.416526 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.416543 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.416567 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.416586 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:47Z","lastTransitionTime":"2025-12-04T15:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:47 crc kubenswrapper[4946]: E1204 15:03:47.436154 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:47Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.440813 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.440854 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
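The "Error updating node status, will retry" records above all report the same root cause: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a serving certificate that expired 2025-08-24T17:21:41Z, while the node clock reads 2025-12-04T15:03:47Z. A minimal diagnostic sketch, not part of the captured log: it assumes Python 3 with the third-party cryptography package installed and that the webhook endpoint is reachable from the node.

import ssl
from datetime import datetime, timezone

from cryptography import x509  # assumption: pip install cryptography

# Fetch the webhook's serving certificate without verification, since an
# expired certificate would make a verifying TLS handshake fail outright.
pem = ssl.get_server_certificate(("127.0.0.1", 9743))
cert = x509.load_pem_x509_certificate(pem.encode())

not_after = cert.not_valid_after_utc  # cryptography >= 42; older versions: not_valid_after
now = datetime.now(timezone.utc)
print("notAfter:", not_after.isoformat())
print("expired: ", now > not_after)  # True for this log: 2025-12-04 is after 2025-08-24

Until that certificate is rotated, every node-status patch is rejected by the webhook, which is why the same payload keeps being retried below.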
event="NodeHasNoDiskPressure" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.440866 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.440884 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.440896 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:47Z","lastTransitionTime":"2025-12-04T15:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.452065 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:47 crc kubenswrapper[4946]: E1204 15:03:47.452255 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:47 crc kubenswrapper[4946]: E1204 15:03:47.456241 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:47Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.459733 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.459775 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
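The pod-sync failure for openshift-multus/network-metrics-daemon-9xbtr above has the same cause as the Ready-condition heartbeats: no CNI configuration file in /etc/kubernetes/cni/net.d/. A minimal check of that directory, not part of the captured log: it assumes Python 3 on the node, and the *.conf/*.conflist/*.json patterns follow the usual libcni loading convention.

import json
from pathlib import Path

# Assumption: the same path the kubelet complains about in the records above.
CNI_DIR = Path("/etc/kubernetes/cni/net.d")

# libcni conventionally loads *.conf, *.conflist and *.json from this directory.
configs = sorted(
    p for p in CNI_DIR.iterdir() if p.suffix in {".conf", ".conflist", ".json"}
) if CNI_DIR.is_dir() else []

if not configs:
    print(f"no CNI configuration file in {CNI_DIR} -- matches the kubelet error")
else:
    for p in configs:
        with p.open() as f:
            conf = json.load(f)
        # A .conflist carries a plugin list; a .conf carries a single "type".
        print(p.name, "->", conf.get("name"), conf.get("type", "(conflist)"))

An empty directory here is expected while the network operator (which writes the CNI config) has not started; pod sandboxes cannot be created until it does.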
event="NodeHasNoDiskPressure" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.459784 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.459799 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.459809 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:47Z","lastTransitionTime":"2025-12-04T15:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:47 crc kubenswrapper[4946]: E1204 15:03:47.471836 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:47Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:47 crc kubenswrapper[4946]: E1204 15:03:47.471990 4946 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.473627 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.473753 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.473838 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.473917 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.474019 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:47Z","lastTransitionTime":"2025-12-04T15:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.576286 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.576344 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.576352 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.576366 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.576376 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:47Z","lastTransitionTime":"2025-12-04T15:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.678565 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.678603 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.678614 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.678629 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.678640 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:47Z","lastTransitionTime":"2025-12-04T15:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.781281 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.781581 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.781643 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.781701 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.781762 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:47Z","lastTransitionTime":"2025-12-04T15:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.884728 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.885098 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.885266 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.885382 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.885541 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:47Z","lastTransitionTime":"2025-12-04T15:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.988684 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.988736 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.988747 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.988766 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:47 crc kubenswrapper[4946]: I1204 15:03:47.988780 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:47Z","lastTransitionTime":"2025-12-04T15:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.092546 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.092586 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.092595 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.092624 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.092634 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:48Z","lastTransitionTime":"2025-12-04T15:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.195577 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.195612 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.195622 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.195638 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.195654 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:48Z","lastTransitionTime":"2025-12-04T15:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.298675 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.298759 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.298783 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.298813 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.298842 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:48Z","lastTransitionTime":"2025-12-04T15:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.410689 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.410736 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.410747 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.410764 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.410777 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:48Z","lastTransitionTime":"2025-12-04T15:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.452721 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:48 crc kubenswrapper[4946]: E1204 15:03:48.452874 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.452869 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.452944 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:48 crc kubenswrapper[4946]: E1204 15:03:48.452987 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:48 crc kubenswrapper[4946]: E1204 15:03:48.453186 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.513914 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.513970 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.513981 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.513996 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.514006 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:48Z","lastTransitionTime":"2025-12-04T15:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.615962 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.615996 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.616005 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.616017 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.616026 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:48Z","lastTransitionTime":"2025-12-04T15:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.718945 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.718994 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.719003 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.719018 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.719027 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:48Z","lastTransitionTime":"2025-12-04T15:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.821494 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.821527 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.821534 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.821548 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.821558 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:48Z","lastTransitionTime":"2025-12-04T15:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.924745 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.924793 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.924805 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.924823 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:48 crc kubenswrapper[4946]: I1204 15:03:48.924836 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:48Z","lastTransitionTime":"2025-12-04T15:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.027597 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.027970 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.028064 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.028206 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.028297 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:49Z","lastTransitionTime":"2025-12-04T15:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.131434 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.131480 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.131492 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.131511 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.131543 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:49Z","lastTransitionTime":"2025-12-04T15:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.234821 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.234879 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.234892 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.234913 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.234926 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:49Z","lastTransitionTime":"2025-12-04T15:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.338227 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.338296 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.338311 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.338335 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.338351 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:49Z","lastTransitionTime":"2025-12-04T15:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.441662 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.441740 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.441755 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.441781 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.441797 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:49Z","lastTransitionTime":"2025-12-04T15:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.452417 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:49 crc kubenswrapper[4946]: E1204 15:03:49.454021 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.469361 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:49Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.488131 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:49Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.504443 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:49Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.524566 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:49Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.538367 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ea9dbda-9794-4f46-b470-bcd55e5e2dd3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba178e72ed627de57f14e2824e6f3df502d1381b5f3cfa499956cbb491913244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://487b17c1161cd918a231631762628de98ccd97dcfa9bdeeb371e5a7b75ebd541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e555c36c9745da9c2a6c943ed8ed26354d90ca29c4760317dafd74573ffbe32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:49Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.543985 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.544016 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.544024 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.544040 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 
15:03:49.544049 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:49Z","lastTransitionTime":"2025-12-04T15:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.554617 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:49Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.572506 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:49Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.588393 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:49Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.611836 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://482f45a4b06addcfe6d528f3e9cb8e4d27938059721fc7a063bb0deb4dd29d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:40Z\\\",\\\"message\\\":\\\"2025-12-04T15:02:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_96487a8a-6292-44e9-b01b-0c12615a547c\\\\n2025-12-04T15:02:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_96487a8a-6292-44e9-b01b-0c12615a547c to /host/opt/cni/bin/\\\\n2025-12-04T15:02:55Z [verbose] multus-daemon started\\\\n2025-12-04T15:02:55Z [verbose] Readiness Indicator file check\\\\n2025-12-04T15:03:40Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:49Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.643415 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:25Z\\\",\\\"message\\\":\\\"LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1204 15:03:25.287620 6635 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 
0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z]\\\\nI1204 15:03:25.287647 6635 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"
sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:49Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.647238 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.647291 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.647311 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.647333 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.647364 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:49Z","lastTransitionTime":"2025-12-04T15:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns 
error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.661336 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:49Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.681796 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:49Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.698033 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:49Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.711033 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:49Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.725312 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:49Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.738688 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:49Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:49 crc 
kubenswrapper[4946]: I1204 15:03:49.750728 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.750823 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.750840 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.750861 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.750873 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:49Z","lastTransitionTime":"2025-12-04T15:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.752492 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:49Z is after 2025-08-24T17:21:41Z"
Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.854138 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.854180 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.854192 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.854213 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.854226 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:49Z","lastTransitionTime":"2025-12-04T15:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.957309 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.957347 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.957356 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.957372 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:49 crc kubenswrapper[4946]: I1204 15:03:49.957382 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:49Z","lastTransitionTime":"2025-12-04T15:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.060049 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.060108 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.060155 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.060177 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.060190 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:50Z","lastTransitionTime":"2025-12-04T15:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.162444 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.162489 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.162498 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.162514 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.162532 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:50Z","lastTransitionTime":"2025-12-04T15:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.265042 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.265087 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.265097 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.265406 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.265435 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:50Z","lastTransitionTime":"2025-12-04T15:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.368893 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.368990 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.369003 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.369442 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.369480 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:50Z","lastTransitionTime":"2025-12-04T15:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.452191 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 15:03:50 crc kubenswrapper[4946]: E1204 15:03:50.452356 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.452578 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 15:03:50 crc kubenswrapper[4946]: E1204 15:03:50.452629 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.452736 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 15:03:50 crc kubenswrapper[4946]: E1204 15:03:50.452792 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.472176 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.472228 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.472238 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.472255 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.472263 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:50Z","lastTransitionTime":"2025-12-04T15:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
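Every NodeNotReady heartbeat and "No sandbox" skip in this stretch repeats one root cause: the kubelet finds no CNI configuration file in /etc/kubernetes/cni/net.d/, so the runtime network stays NotReady and pod sandboxes are not created. A rough Go sketch of that readiness check, assuming the directory named in the log and the .conf/.conflist/.json extensions libcni conventionally accepts:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Directory the kubelet names in the NetworkPluginNotReady message.
	dir := "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	found := 0
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions libcni scans for
			fmt.Println("CNI config:", filepath.Join(dir, e.Name()))
			found++
		}
	}
	if found == 0 {
		fmt.Println("no CNI configuration file found; network plugin not ready")
	}
}

On this node the directory is empty until the OVN-Kubernetes pods come back up and write their config, which is why the same message recurs on every node-status heartbeat below.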
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.575617 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.575947 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.576090 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.576328 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.576470 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:50Z","lastTransitionTime":"2025-12-04T15:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.679471 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.679533 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.679554 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.679581 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.679600 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:50Z","lastTransitionTime":"2025-12-04T15:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.782549 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.782926 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.783077 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.783196 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.783261 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:50Z","lastTransitionTime":"2025-12-04T15:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.886937 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.887302 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.887434 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.887530 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.887616 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:50Z","lastTransitionTime":"2025-12-04T15:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.990270 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.990307 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.990320 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.990336 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:50 crc kubenswrapper[4946]: I1204 15:03:50.990347 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:50Z","lastTransitionTime":"2025-12-04T15:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.093624 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.093659 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.093668 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.093683 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.093695 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:51Z","lastTransitionTime":"2025-12-04T15:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.196822 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.196869 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.196882 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.196900 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.196911 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:51Z","lastTransitionTime":"2025-12-04T15:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.300232 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.300286 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.300300 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.300320 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.300332 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:51Z","lastTransitionTime":"2025-12-04T15:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.403480 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.403533 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.403553 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.403575 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.403589 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:51Z","lastTransitionTime":"2025-12-04T15:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.452722 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr"
Dec 04 15:03:51 crc kubenswrapper[4946]: E1204 15:03:51.452908 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.507911 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.507995 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.508015 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.508046 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.508068 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:51Z","lastTransitionTime":"2025-12-04T15:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.611968 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.612050 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.612069 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.612103 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.612162 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:51Z","lastTransitionTime":"2025-12-04T15:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.715479 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.715762 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.715834 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.715945 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.716010 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:51Z","lastTransitionTime":"2025-12-04T15:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.821030 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.821081 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.821093 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.821125 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.821138 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:51Z","lastTransitionTime":"2025-12-04T15:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.924398 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.924462 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.924479 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.924504 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:51 crc kubenswrapper[4946]: I1204 15:03:51.924522 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:51Z","lastTransitionTime":"2025-12-04T15:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.055906 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.055975 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.055987 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.056006 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.056018 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:52Z","lastTransitionTime":"2025-12-04T15:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.159231 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.159291 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.159309 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.159331 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.159346 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:52Z","lastTransitionTime":"2025-12-04T15:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.261552 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.261599 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.261609 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.261628 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.261640 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:52Z","lastTransitionTime":"2025-12-04T15:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.364237 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.364312 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.364339 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.364364 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.364379 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:52Z","lastTransitionTime":"2025-12-04T15:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.452475 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.452491 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 15:03:52 crc kubenswrapper[4946]: E1204 15:03:52.452617 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.452640 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 15:03:52 crc kubenswrapper[4946]: E1204 15:03:52.452685 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 15:03:52 crc kubenswrapper[4946]: E1204 15:03:52.457510 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.458407 4946 scope.go:117] "RemoveContainer" containerID="096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.466918 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.466959 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.466971 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.466986 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.466996 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:52Z","lastTransitionTime":"2025-12-04T15:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.570071 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.570131 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.570144 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.570187 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.570199 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:52Z","lastTransitionTime":"2025-12-04T15:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.673035 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.673085 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.673097 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.673129 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.673142 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:52Z","lastTransitionTime":"2025-12-04T15:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.775507 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.775560 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.775572 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.775596 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.775612 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:52Z","lastTransitionTime":"2025-12-04T15:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.878903 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.878969 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.878986 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.879007 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.879020 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:52Z","lastTransitionTime":"2025-12-04T15:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.982545 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.982603 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.982613 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.982634 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:52 crc kubenswrapper[4946]: I1204 15:03:52.982649 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:52Z","lastTransitionTime":"2025-12-04T15:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.085628 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.085679 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.085698 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.085723 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.085741 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:53Z","lastTransitionTime":"2025-12-04T15:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.170715 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:03:53 crc kubenswrapper[4946]: E1204 15:03:53.171041 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:04:57.170998028 +0000 UTC m=+148.057041669 (durationBeforeRetry 1m4s). 
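The teardown above fails because the kubevirt.io.hostpath-provisioner CSI driver has not re-registered with the kubelet since the restart, and the operation is re-queued with durationBeforeRetry 1m4s. That 1m4s is consistent with an exponential backoff doubling from a 500ms base (500ms x 2^7 = 64s); the base, factor, and cap in this sketch are assumptions inferred from the observed value, not constants taken from the kubelet source:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Assumed schedule: start at 500ms, double per failure, cap near 2m.
	delay := 500 * time.Millisecond
	maxDelay := 2*time.Minute + 2*time.Second
	for attempt := 1; attempt <= 8; attempt++ {
		fmt.Printf("attempt %d: durationBeforeRetry %s\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
	// attempt 8 prints 1m4s, matching the retry delay logged above.
}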
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.188912 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.188957 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.188974 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.188999 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.189018 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:53Z","lastTransitionTime":"2025-12-04T15:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.272271 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.272373 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.272417 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.272446 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 15:03:53 crc kubenswrapper[4946]: E1204 15:03:53.272590 4946 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 04 15:03:53 crc kubenswrapper[4946]: E1204 15:03:53.272665 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 04 15:03:53 crc kubenswrapper[4946]: E1204 15:03:53.272690 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 04 15:03:53 crc kubenswrapper[4946]: E1204 15:03:53.272705 4946 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 04 15:03:53 crc kubenswrapper[4946]: E1204 15:03:53.272711 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 15:04:57.272677059 +0000 UTC m=+148.158720730 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 04 15:03:53 crc kubenswrapper[4946]: E1204 15:03:53.272775 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 15:04:57.272750131 +0000 UTC m=+148.158793962 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 04 15:03:53 crc kubenswrapper[4946]: E1204 15:03:53.272905 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 04 15:03:53 crc kubenswrapper[4946]: E1204 15:03:53.272918 4946 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 04 15:03:53 crc kubenswrapper[4946]: E1204 15:03:53.272914 4946 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 04 15:03:53 crc kubenswrapper[4946]: E1204 15:03:53.272928 4946 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 04 15:03:53 crc kubenswrapper[4946]: E1204 15:03:53.273011 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 15:04:57.272990348 +0000 UTC m=+148.159034019 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 04 15:03:53 crc kubenswrapper[4946]: E1204 15:03:53.273046 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 15:04:57.2730346 +0000 UTC m=+148.159078241 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
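The repeated object "..." not registered errors on these projected and configmap volumes typically mean the kubelet's watch-based secret/configmap cache has not yet registered those sources for the affected pods after the restart; they do not by themselves prove the objects are missing from the API server. A small client-go sketch to verify the objects out of band; the kubeconfig path is a placeholder, while the namespace and configmap names are taken from the log:

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Placeholder path; any admin kubeconfig for the cluster will do.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// ConfigMaps the projected service-account volumes above depend on.
	for _, name := range []string{"kube-root-ca.crt", "openshift-service-ca.crt"} {
		_, err := client.CoreV1().ConfigMaps("openshift-network-diagnostics").
			Get(context.TODO(), name, metav1.GetOptions{})
		fmt.Printf("configmap %s: err=%v\n", name, err)
	}
}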
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.292492 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.292571 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.292607 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.292644 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.292670 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:53Z","lastTransitionTime":"2025-12-04T15:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.396055 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.396137 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.396154 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.396174 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.396187 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:53Z","lastTransitionTime":"2025-12-04T15:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.452112 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr"
Dec 04 15:03:53 crc kubenswrapper[4946]: E1204 15:03:53.452301 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.499512 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.499586 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.499607 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.499630 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.499647 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:53Z","lastTransitionTime":"2025-12-04T15:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.603041 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.603102 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.603147 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.603171 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.603189 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:53Z","lastTransitionTime":"2025-12-04T15:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.705709 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.705778 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.705793 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.705813 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.705827 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:53Z","lastTransitionTime":"2025-12-04T15:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.808459 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.808517 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.808537 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.808562 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.808581 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:53Z","lastTransitionTime":"2025-12-04T15:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.910960 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.911006 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.911017 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.911033 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:53 crc kubenswrapper[4946]: I1204 15:03:53.911045 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:53Z","lastTransitionTime":"2025-12-04T15:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.014916 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.014980 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.014995 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.015021 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.015037 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:54Z","lastTransitionTime":"2025-12-04T15:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.066759 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovnkube-controller/2.log"
Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.069526 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/0.log"
Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.070708 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerStarted","Data":"0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1"}
Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.071483 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w598m"
Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.087338 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.099922 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.112226 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.117202 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.117228 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.117237 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.117253 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.117263 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:54Z","lastTransitionTime":"2025-12-04T15:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.124910 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.139042 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:54Z is after 2025-08-24T17:21:41Z" Dec 04 
15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.151601 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.165973 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.181988 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.202953 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.217453 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ea9dbda-9794-4f46-b470-bcd55e5e2dd3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba178e72ed627de57f14e2824e6f3df502d1381b5f3cfa499956cbb491913244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://487b17c1161cd918a231631762628de98ccd97dcfa9bdeeb371e5a7b75ebd541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e555c36c9745da9c2a6c943ed8ed26354d90ca29c4760317dafd74573ffbe32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.219518 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.219592 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.219604 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.219623 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.219653 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:54Z","lastTransitionTime":"2025-12-04T15:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.232895 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.250710 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.265647 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.283640 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.300499 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://482f45a4b06addcfe6d528f3e9cb8e4d27938059721fc7a063bb0deb4dd29d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:40Z\\\",\\\"message\\\":\\\"2025-12-04T15:02:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_96487a8a-6292-44e9-b01b-0c12615a547c\\\\n2025-12-04T15:02:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_96487a8a-6292-44e9-b01b-0c12615a547c to /host/opt/cni/bin/\\\\n2025-12-04T15:02:55Z [verbose] multus-daemon started\\\\n2025-12-04T15:02:55Z [verbose] Readiness 
Indicator file check\\\\n2025-12-04T15:03:40Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.322185 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.322232 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.322242 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.322260 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.322272 4946 setters.go:603] "Node became 
not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:54Z","lastTransitionTime":"2025-12-04T15:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.332193 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\
\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ 
nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:25Z\\\",\\\"message\\\":\\\"LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, 
Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1204 15:03:25.287620 6635 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z]\\\\nI1204 15:03:25.287647 6635 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursive
ReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.345850 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:54Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.425422 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.425462 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.425476 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.425498 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.425511 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:54Z","lastTransitionTime":"2025-12-04T15:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.451914 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.452025 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:54 crc kubenswrapper[4946]: E1204 15:03:54.452089 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:54 crc kubenswrapper[4946]: E1204 15:03:54.452371 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.452457 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:54 crc kubenswrapper[4946]: E1204 15:03:54.452642 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.528501 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.528548 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.528560 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.528578 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.528590 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:54Z","lastTransitionTime":"2025-12-04T15:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.631667 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.631743 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.631753 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.631771 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.631781 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:54Z","lastTransitionTime":"2025-12-04T15:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.733996 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.734046 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.734055 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.734070 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.734080 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:54Z","lastTransitionTime":"2025-12-04T15:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.837223 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.837615 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.837628 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.837662 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.837672 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:54Z","lastTransitionTime":"2025-12-04T15:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.940299 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.940395 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.940414 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.940437 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:54 crc kubenswrapper[4946]: I1204 15:03:54.940452 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:54Z","lastTransitionTime":"2025-12-04T15:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.042912 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.042961 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.042978 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.043002 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.043017 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:55Z","lastTransitionTime":"2025-12-04T15:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.076439 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovnkube-controller/3.log" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.076984 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovnkube-controller/2.log" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.080034 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/0.log" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.081083 4946 generic.go:334] "Generic (PLEG): container finished" podID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerID="0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1" exitCode=1 Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.081161 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerDied","Data":"0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1"} Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.081234 4946 scope.go:117] "RemoveContainer" containerID="096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.081814 4946 scope.go:117] "RemoveContainer" containerID="0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1" Dec 04 15:03:55 crc kubenswrapper[4946]: E1204 15:03:55.082035 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.099632 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://482f45a4b06addcfe6d528f3e9cb8e4d27938059721fc7a063bb0deb4dd29d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:40Z\\\",\\\"message\\\":\\\"2025-12-04T15:02:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_96487a8a-6292-44e9-b01b-0c12615a547c\\\\n2025-12-04T15:02:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_96487a8a-6292-44e9-b01b-0c12615a547c to /host/opt/cni/bin/\\\\n2025-12-04T15:02:55Z [verbose] multus-daemon started\\\\n2025-12-04T15:02:55Z [verbose] Readiness Indicator file check\\\\n2025-12-04T15:03:40Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.121945 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://096a927dce08a952d173b28de25c540f2588d1079e90527460f115d04bc564c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:25Z\\\",\\\"message\\\":\\\"LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.88\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1204 15:03:25.287620 6635 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 
0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:25Z is after 2025-08-24T17:21:41Z]\\\\nI1204 15:03:25.287647 6635 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:54Z\\\",\\\"message\\\":\\\")\\\\nI1204 15:03:54.792043 7020 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager-operator/metrics]} name:Service_openshift-controller-manager-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.58:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4607c9b7-15f9-4ba0-86e5-0021ba7e4488}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 15:03:54.792099 7020 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-fjmh5\\\\nI1204 15:03:54.791956 7020 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-jdjs9\\\\nI1204 15:03:54.792168 7020 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-jdjs9 in node crc\\\\nI1204 15:03:54.792176 7020 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-jdjs9 after 0 failed attempt(s)\\\\nI1204 15:03:54.792182 7020 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-jdjs9\\\\nI1204 15:03:54.792012 7020 obj_retry.go:303] 
R\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.140324 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.145144 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.145212 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.145225 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.145239 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.145248 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:55Z","lastTransitionTime":"2025-12-04T15:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.163625 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.183345 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.198372 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.210757 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.223167 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:55Z is after 2025-08-24T17:21:41Z" Dec 04 
15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.231979 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.242853 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.247651 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.247698 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.247710 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.247730 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.247741 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:55Z","lastTransitionTime":"2025-12-04T15:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.254133 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.267367 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.278866 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ea9dbda-9794-4f46-b470-bcd55e5e2dd3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba178e72ed627de57f14e2824e6f3df502d1381b5f3cfa499956cbb491913244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://487b17c1161cd918a231631762628de98ccd97dcfa9bdeeb371e5a7b75ebd541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e555c36c9745da9c2a6c943ed8ed26354d90ca29c4760317dafd74573ffbe32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.292256 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.308978 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.327059 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/opensh
ift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.348785 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:55Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.350205 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.350254 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.350265 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.350284 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.350295 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:55Z","lastTransitionTime":"2025-12-04T15:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.452182 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:55 crc kubenswrapper[4946]: E1204 15:03:55.452395 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.453495 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.453528 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.453538 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.453553 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.453564 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:55Z","lastTransitionTime":"2025-12-04T15:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.556904 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.556955 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.556966 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.556987 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.557000 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:55Z","lastTransitionTime":"2025-12-04T15:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.659345 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.659415 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.659436 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.659465 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.659485 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:55Z","lastTransitionTime":"2025-12-04T15:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.761560 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.761601 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.761611 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.761624 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.761634 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:55Z","lastTransitionTime":"2025-12-04T15:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.864150 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.864197 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.864205 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.864221 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.864233 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:55Z","lastTransitionTime":"2025-12-04T15:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.967756 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.967815 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.967827 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.967851 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:55 crc kubenswrapper[4946]: I1204 15:03:55.967864 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:55Z","lastTransitionTime":"2025-12-04T15:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.070813 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.070856 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.070866 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.070884 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.070895 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:56Z","lastTransitionTime":"2025-12-04T15:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.085900 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovnkube-controller/3.log" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.088586 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/0.log" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.089885 4946 scope.go:117] "RemoveContainer" containerID="0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1" Dec 04 15:03:56 crc kubenswrapper[4946]: E1204 15:03:56.090015 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.111222 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.128829 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.146201 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/opensh
ift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.159400 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.171751 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ea9dbda-9794-4f46-b470-bcd55e5e2dd3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba178e72ed627de57f14e2824e6f3df502d1381b5f3cfa499956cbb491913244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://487b17c1161cd918a231631762628de98ccd97dcfa9bdeeb371e5a7b75ebd541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e555c36c9745da9c2a6c943ed8ed26354d90ca29c4760317dafd74573ffbe32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.173420 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.173496 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.173510 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.173557 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 
15:03:56.173575 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:56Z","lastTransitionTime":"2025-12-04T15:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.190577 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ov
nkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ 
vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:54Z\\\",\\\"message\\\":\\\")\\\\nI1204 15:03:54.792043 7020 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer 
Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager-operator/metrics]} name:Service_openshift-controller-manager-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.58:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4607c9b7-15f9-4ba0-86e5-0021ba7e4488}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 15:03:54.792099 7020 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-fjmh5\\\\nI1204 15:03:54.791956 7020 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-jdjs9\\\\nI1204 15:03:54.792168 7020 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-jdjs9 in node crc\\\\nI1204 15:03:54.792176 7020 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-jdjs9 after 0 failed attempt(s)\\\\nI1204 15:03:54.792182 7020 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-jdjs9\\\\nI1204 15:03:54.792012 7020 obj_retry.go:303] R\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\
\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.201985 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.212383 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://482f45a4b06addcfe6d528f3e9cb8e4d27938059721fc7a063bb0deb4dd29d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:40Z\\\",\\\"message\\\":\\\"2025-12-04T15:02:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_96487a8a-6292-44e9-b01b-0c12615a547c\\\\n2025-12-04T15:02:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_96487a8a-6292-44e9-b01b-0c12615a547c to /host/opt/cni/bin/\\\\n2025-12-04T15:02:55Z [verbose] multus-daemon started\\\\n2025-12-04T15:02:55Z [verbose] Readiness Indicator file check\\\\n2025-12-04T15:03:40Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.222111 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.235635 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.245176 4946 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.256379 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:56Z is after 2025-08-24T17:21:41Z" Dec 04 
15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.267223 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.276101 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.276237 4946 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.276254 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.276276 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.276290 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:56Z","lastTransitionTime":"2025-12-04T15:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.280771 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.297867 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.313883 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.329378 4946 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:56Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.378373 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.378408 4946 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.378418 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.378431 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.378440 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:56Z","lastTransitionTime":"2025-12-04T15:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.452542 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.452746 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.452970 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:56 crc kubenswrapper[4946]: E1204 15:03:56.452911 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:56 crc kubenswrapper[4946]: E1204 15:03:56.453229 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:56 crc kubenswrapper[4946]: E1204 15:03:56.453396 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.481756 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.481834 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.481849 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.481873 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.481888 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:56Z","lastTransitionTime":"2025-12-04T15:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.584871 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.584917 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.584925 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.584938 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.584947 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:56Z","lastTransitionTime":"2025-12-04T15:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.687376 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.687426 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.687435 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.687449 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.687459 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:56Z","lastTransitionTime":"2025-12-04T15:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.789607 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.789656 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.789667 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.789684 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.789695 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:56Z","lastTransitionTime":"2025-12-04T15:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.893312 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.893408 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.893419 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.893439 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.893452 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:56Z","lastTransitionTime":"2025-12-04T15:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.997042 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.997173 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.997210 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.997242 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:56 crc kubenswrapper[4946]: I1204 15:03:56.997262 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:56Z","lastTransitionTime":"2025-12-04T15:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.099843 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.099888 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.099902 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.099918 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.099930 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:57Z","lastTransitionTime":"2025-12-04T15:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.202925 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.202963 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.202972 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.202987 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.202997 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:57Z","lastTransitionTime":"2025-12-04T15:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.305195 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.305248 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.305259 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.305276 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.305285 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:57Z","lastTransitionTime":"2025-12-04T15:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.407711 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.407797 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.407810 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.407828 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.407839 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:57Z","lastTransitionTime":"2025-12-04T15:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.452081 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:57 crc kubenswrapper[4946]: E1204 15:03:57.452239 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.510395 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.510435 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.510445 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.510460 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.510469 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:57Z","lastTransitionTime":"2025-12-04T15:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.613764 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.613808 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.613819 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.613840 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.613850 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:57Z","lastTransitionTime":"2025-12-04T15:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.707353 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.707415 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.707428 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.707447 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.707459 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:57Z","lastTransitionTime":"2025-12-04T15:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:57 crc kubenswrapper[4946]: E1204 15:03:57.719935 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:57Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.724638 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.724678 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
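The status patch above never lands: the API server must consult the node.network-node-identity.openshift.io validating webhook at https://127.0.0.1:9743, and that endpoint presents a serving certificate whose NotAfter (2025-08-24T17:21:41Z) is months behind the node clock (2025-12-04T15:03:57Z), so Go's TLS stack rejects it with "x509: certificate has expired or is not yet valid". The standalone Go sketch below reproduces the same validity-window test with crypto/x509; the PEM file name is a placeholder, and this is an illustration rather than code from the kubelet or the webhook.

// certcheck.go - a standalone sketch of the validity-window check that
// failed above; the PEM path is a placeholder, not a real cluster path.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	data, err := os.ReadFile("webhook-serving-cert.pem") // placeholder path
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	now := time.Now().UTC()
	switch {
	case now.After(cert.NotAfter):
		// The case hit in the log: current time is after NotAfter.
		fmt.Printf("certificate expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Printf("certificate not yet valid until %s\n",
			cert.NotBefore.Format(time.RFC3339))
	default:
		fmt.Println("certificate is within its validity window")
	}
}

Until that certificate is rotated, every node-status patch is rejected at the webhook hop regardless of the payload's contents.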
event="NodeHasNoDiskPressure" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.724688 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.724704 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.724713 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:57Z","lastTransitionTime":"2025-12-04T15:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:57 crc kubenswrapper[4946]: E1204 15:03:57.739134 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:57Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.743971 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.744031 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.744044 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.744066 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.744079 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:57Z","lastTransitionTime":"2025-12-04T15:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:57 crc kubenswrapper[4946]: E1204 15:03:57.758541 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:57Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.763349 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.763386 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
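With the webhook unreachable, every node-status sync fails identically and the kubelet retries the patch a bounded number of times in quick succession (the surviving attempts above land roughly 20 ms apart) before giving up until the next sync period. The sketch below is a schematic of that bounded-retry shape, not the kubelet's actual code; nodeStatusUpdateRetry = 5 mirrors the constant in kubelet_node_status.go (treat the exact value as an assumption here), and patchNodeStatus is a hypothetical stand-in that always returns the TLS error seen in this log.

// retrysketch.go - a schematic of the bounded retry loop visible above;
// nodeStatusUpdateRetry and patchNodeStatus are assumptions/stand-ins.
package main

import (
	"errors"
	"fmt"
)

const nodeStatusUpdateRetry = 5 // assumed retry budget per sync

// patchNodeStatus is a hypothetical stand-in that fails the way the log
// does: the webhook rejects the patch with a TLS verification error.
func patchNodeStatus(attempt int) error {
	return errors.New("tls: failed to verify certificate: x509: certificate has expired or is not yet valid")
}

// updateNodeStatus retries the patch a fixed number of times, emitting a
// "will retry" message per failure, then reports a terminal error once
// the budget is exhausted - the shape of the events recorded above.
func updateNodeStatus() error {
	var lastErr error
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if lastErr = patchNodeStatus(i); lastErr == nil {
			return nil
		}
		fmt.Printf("Error updating node status, will retry: attempt %d: %v\n", i+1, lastErr)
	}
	return fmt.Errorf("update node status exceeded retry count: %w", lastErr)
}

func main() {
	if err := updateNodeStatus(); err != nil {
		fmt.Println(err)
	}
}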
event="NodeHasNoDiskPressure" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.763396 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.763412 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.763422 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:57Z","lastTransitionTime":"2025-12-04T15:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:57 crc kubenswrapper[4946]: E1204 15:03:57.782623 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:57Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.787360 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.787410 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.787424 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.787448 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.787464 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:57Z","lastTransitionTime":"2025-12-04T15:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:57 crc kubenswrapper[4946]: E1204 15:03:57.803501 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:57Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:57 crc kubenswrapper[4946]: E1204 15:03:57.803760 4946 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.805388 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.805436 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.805452 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.805475 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.805489 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:57Z","lastTransitionTime":"2025-12-04T15:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.907330 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.907422 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.907440 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.907458 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:57 crc kubenswrapper[4946]: I1204 15:03:57.907470 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:57Z","lastTransitionTime":"2025-12-04T15:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.009963 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.010005 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.010017 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.010034 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.010046 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:58Z","lastTransitionTime":"2025-12-04T15:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.112042 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.112181 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.112202 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.112221 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.112232 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:58Z","lastTransitionTime":"2025-12-04T15:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.214598 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.214669 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.214688 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.214711 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.214731 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:58Z","lastTransitionTime":"2025-12-04T15:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.317720 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.317764 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.317776 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.317792 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.317805 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:58Z","lastTransitionTime":"2025-12-04T15:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.420424 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.420461 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.420490 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.420506 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.420516 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:58Z","lastTransitionTime":"2025-12-04T15:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.451690 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.451764 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.451774 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:03:58 crc kubenswrapper[4946]: E1204 15:03:58.452004 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:03:58 crc kubenswrapper[4946]: E1204 15:03:58.452197 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:03:58 crc kubenswrapper[4946]: E1204 15:03:58.452400 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.467682 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.523337 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.523425 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.523447 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.523476 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.523496 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:58Z","lastTransitionTime":"2025-12-04T15:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.626063 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.626110 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.626141 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.626159 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.626169 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:58Z","lastTransitionTime":"2025-12-04T15:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.728454 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.728517 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.728532 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.728553 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.728589 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:58Z","lastTransitionTime":"2025-12-04T15:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.831743 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.831813 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.831832 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.831864 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.831884 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:58Z","lastTransitionTime":"2025-12-04T15:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.935405 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.935466 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.935478 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.935499 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:58 crc kubenswrapper[4946]: I1204 15:03:58.935515 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:58Z","lastTransitionTime":"2025-12-04T15:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.037971 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.038027 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.038041 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.038063 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.038080 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:59Z","lastTransitionTime":"2025-12-04T15:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.140267 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.140692 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.140849 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.141005 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.141165 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:59Z","lastTransitionTime":"2025-12-04T15:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.244544 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.244613 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.244635 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.244666 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.244690 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:59Z","lastTransitionTime":"2025-12-04T15:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.347698 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.347755 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.347767 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.347790 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.347803 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:59Z","lastTransitionTime":"2025-12-04T15:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.450503 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.450542 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.450553 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.450571 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.450583 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:59Z","lastTransitionTime":"2025-12-04T15:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.451742 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:03:59 crc kubenswrapper[4946]: E1204 15:03:59.452109 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.469085 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.487689 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.510946 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.529870 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.546140 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ea9dbda-9794-4f46-b470-bcd55e5e2dd3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba178e72ed627de57f14e2824e6f3df502d1381b5f3cfa499956cbb491913244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://487b17c1161cd918a231631762628de98ccd97dcfa9bdeeb371e5a7b75ebd541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e555c36c9745da9c2a6c943ed8ed26354d90ca29c4760317dafd74573ffbe32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.552958 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.553025 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.553034 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.553058 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 
15:03:59.553070 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:59Z","lastTransitionTime":"2025-12-04T15:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.560644 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.574517 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.585721 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd6b836-f508-4a4d-8981-b4d8371aba43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42aa2de3e954b29b1d06dbfa70cfce3fd4ab67429abbbd4ec07aac4284f32e36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d188cd5f6e35af6fe300e8761b2d502b44b37c0b2edc2f09754bf85fa67f0d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d188cd5f6e35af6fe300e8761b2d502b44b37c0b2edc2f09754bf85fa67f0d57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.595940 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.609447 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://482f45a4b06addcfe6d528f3e9cb8e4d27938059721fc7a063bb0deb4dd29d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:40Z\\\",\\\"message\\\":\\\"2025-12-04T15:02:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_96487a8a-6292-44e9-b01b-0c12615a547c\\\\n2025-12-04T15:02:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_96487a8a-6292-44e9-b01b-0c12615a547c to /host/opt/cni/bin/\\\\n2025-12-04T15:02:55Z [verbose] multus-daemon started\\\\n2025-12-04T15:02:55Z [verbose] Readiness Indicator file check\\\\n2025-12-04T15:03:40Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.629240 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:54Z\\\",\\\"message\\\":\\\")\\\\nI1204 15:03:54.792043 7020 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager-operator/metrics]} name:Service_openshift-controller-manager-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.58:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4607c9b7-15f9-4ba0-86e5-0021ba7e4488}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 
15:03:54.792099 7020 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-fjmh5\\\\nI1204 15:03:54.791956 7020 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-jdjs9\\\\nI1204 15:03:54.792168 7020 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-jdjs9 in node crc\\\\nI1204 15:03:54.792176 7020 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-jdjs9 after 0 failed attempt(s)\\\\nI1204 15:03:54.792182 7020 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-jdjs9\\\\nI1204 15:03:54.792012 7020 obj_retry.go:303] R\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.645327 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.655852 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.655890 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.655904 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.655923 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.655936 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:59Z","lastTransitionTime":"2025-12-04T15:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.657012 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.671766 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.685067 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.698969 4946 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.713016 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:59Z is after 2025-08-24T17:21:41Z" Dec 04 
15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.727157 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:03:59Z is after 2025-08-24T17:21:41Z" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.758599 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.758664 4946 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.758682 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.758706 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.758722 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:59Z","lastTransitionTime":"2025-12-04T15:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.861189 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.861227 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.861237 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.861254 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.861264 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:59Z","lastTransitionTime":"2025-12-04T15:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.963952 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.964010 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.964029 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.964053 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:03:59 crc kubenswrapper[4946]: I1204 15:03:59.964069 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:03:59Z","lastTransitionTime":"2025-12-04T15:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.066558 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.066590 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.066599 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.066612 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.066621 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:00Z","lastTransitionTime":"2025-12-04T15:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.168795 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.168871 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.168896 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.168929 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.168987 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:00Z","lastTransitionTime":"2025-12-04T15:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.272027 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.272080 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.272095 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.272132 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.272147 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:00Z","lastTransitionTime":"2025-12-04T15:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.374290 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.374327 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.374336 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.374369 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.374379 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:00Z","lastTransitionTime":"2025-12-04T15:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.452487 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.452572 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:00 crc kubenswrapper[4946]: E1204 15:04:00.452623 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.452634 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:00 crc kubenswrapper[4946]: E1204 15:04:00.452744 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:00 crc kubenswrapper[4946]: E1204 15:04:00.452803 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.476757 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.476817 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.476830 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.476849 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.477192 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:00Z","lastTransitionTime":"2025-12-04T15:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.579517 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.579571 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.579579 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.579595 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.579605 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:00Z","lastTransitionTime":"2025-12-04T15:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.681737 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.681768 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.681779 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.681792 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.681801 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:00Z","lastTransitionTime":"2025-12-04T15:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.784483 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.784534 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.784550 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.784574 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.784590 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:00Z","lastTransitionTime":"2025-12-04T15:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.887314 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.887357 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.887369 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.887384 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.887396 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:00Z","lastTransitionTime":"2025-12-04T15:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.989736 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.989784 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.989798 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.989816 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:00 crc kubenswrapper[4946]: I1204 15:04:00.989828 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:00Z","lastTransitionTime":"2025-12-04T15:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.092681 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.092741 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.092752 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.092770 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.092785 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:01Z","lastTransitionTime":"2025-12-04T15:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.194935 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.195439 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.195567 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.195656 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.195733 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:01Z","lastTransitionTime":"2025-12-04T15:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.298055 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.298086 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.298095 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.298109 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.298147 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:01Z","lastTransitionTime":"2025-12-04T15:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.400953 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.401634 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.401754 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.401839 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.401915 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:01Z","lastTransitionTime":"2025-12-04T15:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.452347 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:01 crc kubenswrapper[4946]: E1204 15:04:01.452479 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.504258 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.504546 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.504798 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.504980 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.505146 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:01Z","lastTransitionTime":"2025-12-04T15:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.607942 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.608219 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.608240 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.608257 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.608268 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:01Z","lastTransitionTime":"2025-12-04T15:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.711042 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.711400 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.711468 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.711532 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.711624 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:01Z","lastTransitionTime":"2025-12-04T15:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.814967 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.815410 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.815434 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.815461 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.815482 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:01Z","lastTransitionTime":"2025-12-04T15:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.920584 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.920670 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.920696 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.920728 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:01 crc kubenswrapper[4946]: I1204 15:04:01.920749 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:01Z","lastTransitionTime":"2025-12-04T15:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.024555 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.024629 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.024655 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.024690 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.024716 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:02Z","lastTransitionTime":"2025-12-04T15:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.127442 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.127520 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.127535 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.127555 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.128036 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:02Z","lastTransitionTime":"2025-12-04T15:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.230478 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.230514 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.230523 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.230537 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.230548 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:02Z","lastTransitionTime":"2025-12-04T15:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.333345 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.333423 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.333444 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.333470 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.333488 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:02Z","lastTransitionTime":"2025-12-04T15:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.436810 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.436890 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.436907 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.436934 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.436949 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:02Z","lastTransitionTime":"2025-12-04T15:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.452738 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:02 crc kubenswrapper[4946]: E1204 15:04:02.452927 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.452768 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:02 crc kubenswrapper[4946]: E1204 15:04:02.453039 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.452738 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:02 crc kubenswrapper[4946]: E1204 15:04:02.453163 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.539508 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.539545 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.539558 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.539576 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.539592 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:02Z","lastTransitionTime":"2025-12-04T15:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.641980 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.642027 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.642041 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.642062 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 15:04:02 crc kubenswrapper[4946]: I1204 15:04:02.642073 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:02Z","lastTransitionTime":"2025-12-04T15:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 15:04:03 crc kubenswrapper[4946]: I1204 15:04:03.454911 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr"
Dec 04 15:04:03 crc kubenswrapper[4946]: E1204 15:04:03.455104 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a"
Dec 04 15:04:04 crc kubenswrapper[4946]: I1204 15:04:04.451997 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 15:04:04 crc kubenswrapper[4946]: E1204 15:04:04.452152 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 15:04:04 crc kubenswrapper[4946]: I1204 15:04:04.452326 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 15:04:04 crc kubenswrapper[4946]: I1204 15:04:04.452339 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 15:04:04 crc kubenswrapper[4946]: E1204 15:04:04.452414 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 04 15:04:04 crc kubenswrapper[4946]: E1204 15:04:04.452477 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 15:04:05 crc kubenswrapper[4946]: I1204 15:04:05.452486 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr"
Dec 04 15:04:05 crc kubenswrapper[4946]: E1204 15:04:05.452783 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a"
Dec 04 15:04:06 crc kubenswrapper[4946]: I1204 15:04:06.452060 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 15:04:06 crc kubenswrapper[4946]: E1204 15:04:06.452255 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 15:04:06 crc kubenswrapper[4946]: I1204 15:04:06.452501 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 15:04:06 crc kubenswrapper[4946]: E1204 15:04:06.452640 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 15:04:06 crc kubenswrapper[4946]: I1204 15:04:06.452826 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 15:04:06 crc kubenswrapper[4946]: E1204 15:04:06.453022 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.451755 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr"
Dec 04 15:04:07 crc kubenswrapper[4946]: E1204 15:04:07.451953 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a"
Has your network provider started?"} Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.687346 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.687406 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.687417 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.687434 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.687446 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:07Z","lastTransitionTime":"2025-12-04T15:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.789760 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.789808 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.789820 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.789836 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.789845 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:07Z","lastTransitionTime":"2025-12-04T15:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.892601 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.892659 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.892677 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.892704 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.892723 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:07Z","lastTransitionTime":"2025-12-04T15:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.922745 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.922977 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.923072 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.923173 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.923247 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:07Z","lastTransitionTime":"2025-12-04T15:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:07 crc kubenswrapper[4946]: E1204 15:04:07.935501 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.939536 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.939601 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.939627 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.939657 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.939680 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:07Z","lastTransitionTime":"2025-12-04T15:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:07 crc kubenswrapper[4946]: E1204 15:04:07.957645 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.961851 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.961886 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.961898 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.961918 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.961931 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:07Z","lastTransitionTime":"2025-12-04T15:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:07 crc kubenswrapper[4946]: E1204 15:04:07.974383 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.979199 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.979349 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.979431 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.979516 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.979602 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:07Z","lastTransitionTime":"2025-12-04T15:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:07 crc kubenswrapper[4946]: E1204 15:04:07.993345 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:07Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.998001 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.998232 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.998427 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.998541 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:07 crc kubenswrapper[4946]: I1204 15:04:07.998643 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:07Z","lastTransitionTime":"2025-12-04T15:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:08 crc kubenswrapper[4946]: E1204 15:04:08.012353 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:08Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:08 crc kubenswrapper[4946]: E1204 15:04:08.012483 4946 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.014014 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.014043 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.014051 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.014064 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.014074 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:08Z","lastTransitionTime":"2025-12-04T15:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.116703 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.117029 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.117215 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.117342 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.117490 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:08Z","lastTransitionTime":"2025-12-04T15:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.221004 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.221040 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.221056 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.221074 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.221088 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:08Z","lastTransitionTime":"2025-12-04T15:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.323181 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.323231 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.323245 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.323264 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.323280 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:08Z","lastTransitionTime":"2025-12-04T15:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.425344 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.425413 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.425457 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.425486 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.425507 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:08Z","lastTransitionTime":"2025-12-04T15:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.452165 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.452215 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:08 crc kubenswrapper[4946]: E1204 15:04:08.452318 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.452337 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:08 crc kubenswrapper[4946]: E1204 15:04:08.452599 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:08 crc kubenswrapper[4946]: E1204 15:04:08.452796 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.528490 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.528555 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.528566 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.528583 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.528598 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:08Z","lastTransitionTime":"2025-12-04T15:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.631751 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.631832 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.631857 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.631888 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.631912 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:08Z","lastTransitionTime":"2025-12-04T15:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.735203 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.735263 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.735274 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.735293 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.735310 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:08Z","lastTransitionTime":"2025-12-04T15:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.838451 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.838534 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.838547 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.838563 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.838573 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:08Z","lastTransitionTime":"2025-12-04T15:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.941028 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.941075 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.941087 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.941106 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:08 crc kubenswrapper[4946]: I1204 15:04:08.941144 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:08Z","lastTransitionTime":"2025-12-04T15:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.043596 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.043678 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.043701 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.043737 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.043775 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:09Z","lastTransitionTime":"2025-12-04T15:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.140170 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs\") pod \"network-metrics-daemon-9xbtr\" (UID: \"0a3cccbb-17c2-487d-a952-6b5d50656e2a\") " pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:09 crc kubenswrapper[4946]: E1204 15:04:09.140377 4946 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 15:04:09 crc kubenswrapper[4946]: E1204 15:04:09.140497 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs podName:0a3cccbb-17c2-487d-a952-6b5d50656e2a nodeName:}" failed. No retries permitted until 2025-12-04 15:05:13.140471256 +0000 UTC m=+164.026514897 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs") pod "network-metrics-daemon-9xbtr" (UID: "0a3cccbb-17c2-487d-a952-6b5d50656e2a") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.146974 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.147040 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.147056 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.147078 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.147092 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:09Z","lastTransitionTime":"2025-12-04T15:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.249738 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.249791 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.249804 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.249828 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.249843 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:09Z","lastTransitionTime":"2025-12-04T15:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.352783 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.352846 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.352862 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.352886 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.352902 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:09Z","lastTransitionTime":"2025-12-04T15:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.452369 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:09 crc kubenswrapper[4946]: E1204 15:04:09.453038 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.453430 4946 scope.go:117] "RemoveContainer" containerID="0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1" Dec 04 15:04:09 crc kubenswrapper[4946]: E1204 15:04:09.453644 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.459715 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.459793 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.460007 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.460036 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.460055 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:09Z","lastTransitionTime":"2025-12-04T15:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.474507 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.490760 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.506906 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.524849 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.542671 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.558808 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ea9dbda-9794-4f46-b470-bcd55e5e2dd3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba178e72ed627de57f14e2824e6f3df502d1381b5f3cfa499956cbb491913244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://487b17c1161cd918a231631762628de98ccd97dcfa9bdeeb371e5a7b75ebd541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e555c36c9745da9c2a6c943ed8ed26354d90ca29c4760317dafd74573ffbe32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.563379 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.563422 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.563434 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.563452 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 
15:04:09.563465 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:09Z","lastTransitionTime":"2025-12-04T15:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.576294 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.589591 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd6b836-f508-4a4d-8981-b4d8371aba43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42aa2de3e954b29b1d06dbfa70cfce3fd4ab67429abbbd4ec07aac4284f32e36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d188cd5f6e35af6fe300e8761b2d502b44b37c0b2edc2f09754bf85fa67f0d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d188cd5f6e35af6fe300e8761b2d502b44b37c0b2edc2f09754bf
85fa67f0d57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.602741 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.620580 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://482f45a4b06addcfe6d528f3e9cb8e4d27938059721fc7a063bb0deb4dd29d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:40Z\\\",\\\"message\\\":\\\"2025-12-04T15:02:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_96487a8a-6292-44e9-b01b-0c12615a547c\\\\n2025-12-04T15:02:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_96487a8a-6292-44e9-b01b-0c12615a547c to /host/opt/cni/bin/\\\\n2025-12-04T15:02:55Z [verbose] multus-daemon started\\\\n2025-12-04T15:02:55Z [verbose] Readiness Indicator file check\\\\n2025-12-04T15:03:40Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.645676 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:54Z\\\",\\\"message\\\":\\\")\\\\nI1204 15:03:54.792043 7020 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager-operator/metrics]} name:Service_openshift-controller-manager-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.58:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4607c9b7-15f9-4ba0-86e5-0021ba7e4488}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 
15:03:54.792099 7020 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-fjmh5\\\\nI1204 15:03:54.791956 7020 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-jdjs9\\\\nI1204 15:03:54.792168 7020 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-jdjs9 in node crc\\\\nI1204 15:03:54.792176 7020 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-jdjs9 after 0 failed attempt(s)\\\\nI1204 15:03:54.792182 7020 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-jdjs9\\\\nI1204 15:03:54.792012 7020 obj_retry.go:303] R\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.661940 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.667638 4946 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.667679 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.667691 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.667709 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.667723 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:09Z","lastTransitionTime":"2025-12-04T15:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.675565 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.688503 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.699977 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.715835 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.726923 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.741161 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:09Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.770311 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.770407 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.770420 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.770440 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.770453 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:09Z","lastTransitionTime":"2025-12-04T15:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.873250 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.873279 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.873288 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.873302 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:09 crc kubenswrapper[4946]: I1204 15:04:09.873311 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:09Z","lastTransitionTime":"2025-12-04T15:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:10 crc kubenswrapper[4946]: I1204 15:04:10.283390 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:10 crc kubenswrapper[4946]: I1204 15:04:10.283433 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:10 crc kubenswrapper[4946]: I1204 15:04:10.283444 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:10 crc kubenswrapper[4946]: I1204 15:04:10.283461 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:10 crc kubenswrapper[4946]: I1204 15:04:10.283472 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:10Z","lastTransitionTime":"2025-12-04T15:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:10 crc kubenswrapper[4946]: I1204 15:04:10.386045 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:10 crc kubenswrapper[4946]: I1204 15:04:10.386081 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:10 crc kubenswrapper[4946]: I1204 15:04:10.386090 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:10 crc kubenswrapper[4946]: I1204 15:04:10.386104 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:10 crc kubenswrapper[4946]: I1204 15:04:10.386132 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:10Z","lastTransitionTime":"2025-12-04T15:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:10 crc kubenswrapper[4946]: I1204 15:04:10.451740 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:10 crc kubenswrapper[4946]: E1204 15:04:10.451894 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:10 crc kubenswrapper[4946]: I1204 15:04:10.451923 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:10 crc kubenswrapper[4946]: E1204 15:04:10.452032 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:10 crc kubenswrapper[4946]: I1204 15:04:10.452092 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:10 crc kubenswrapper[4946]: E1204 15:04:10.452183 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:10 crc kubenswrapper[4946]: I1204 15:04:10.492492 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:10 crc kubenswrapper[4946]: I1204 15:04:10.492677 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:10 crc kubenswrapper[4946]: I1204 15:04:10.492720 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:10 crc kubenswrapper[4946]: I1204 15:04:10.492782 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:10 crc kubenswrapper[4946]: I1204 15:04:10.492795 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:10Z","lastTransitionTime":"2025-12-04T15:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:11 crc kubenswrapper[4946]: I1204 15:04:11.452559 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:11 crc kubenswrapper[4946]: E1204 15:04:11.452835 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:11 crc kubenswrapper[4946]: I1204 15:04:11.466725 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 04 15:04:11 crc kubenswrapper[4946]: I1204 15:04:11.525186 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:11 crc kubenswrapper[4946]: I1204 15:04:11.525466 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:11 crc kubenswrapper[4946]: I1204 15:04:11.525560 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:11 crc kubenswrapper[4946]: I1204 15:04:11.525649 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:11 crc kubenswrapper[4946]: I1204 15:04:11.525765 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:11Z","lastTransitionTime":"2025-12-04T15:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:11 crc kubenswrapper[4946]: I1204 15:04:11.628667 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:11 crc kubenswrapper[4946]: I1204 15:04:11.628727 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:11 crc kubenswrapper[4946]: I1204 15:04:11.628742 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:11 crc kubenswrapper[4946]: I1204 15:04:11.628766 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:11 crc kubenswrapper[4946]: I1204 15:04:11.628781 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:11Z","lastTransitionTime":"2025-12-04T15:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:12 crc kubenswrapper[4946]: I1204 15:04:12.351438 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:12 crc kubenswrapper[4946]: I1204 15:04:12.351482 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:12 crc kubenswrapper[4946]: I1204 15:04:12.351491 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:12 crc kubenswrapper[4946]: I1204 15:04:12.351507 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:12 crc kubenswrapper[4946]: I1204 15:04:12.351516 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:12Z","lastTransitionTime":"2025-12-04T15:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:12 crc kubenswrapper[4946]: I1204 15:04:12.451704 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:12 crc kubenswrapper[4946]: I1204 15:04:12.451746 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:12 crc kubenswrapper[4946]: E1204 15:04:12.452092 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:12 crc kubenswrapper[4946]: E1204 15:04:12.452299 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:12 crc kubenswrapper[4946]: I1204 15:04:12.451826 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:12 crc kubenswrapper[4946]: E1204 15:04:12.452532 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:12 crc kubenswrapper[4946]: I1204 15:04:12.453523 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:12 crc kubenswrapper[4946]: I1204 15:04:12.453551 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:12 crc kubenswrapper[4946]: I1204 15:04:12.453560 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:12 crc kubenswrapper[4946]: I1204 15:04:12.453576 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:12 crc kubenswrapper[4946]: I1204 15:04:12.453587 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:12Z","lastTransitionTime":"2025-12-04T15:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:12 crc kubenswrapper[4946]: I1204 15:04:12.557195 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:12 crc kubenswrapper[4946]: I1204 15:04:12.557252 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:12 crc kubenswrapper[4946]: I1204 15:04:12.557264 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:12 crc kubenswrapper[4946]: I1204 15:04:12.557283 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:12 crc kubenswrapper[4946]: I1204 15:04:12.557295 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:12Z","lastTransitionTime":"2025-12-04T15:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.279480 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.279515 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.279526 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.279541 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.279552 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:13Z","lastTransitionTime":"2025-12-04T15:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.382610 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.382858 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.382956 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.383047 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.383161 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:13Z","lastTransitionTime":"2025-12-04T15:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.452585 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:13 crc kubenswrapper[4946]: E1204 15:04:13.452967 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.486007 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.486055 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.486066 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.486081 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.486095 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:13Z","lastTransitionTime":"2025-12-04T15:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.588297 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.588327 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.588337 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.588351 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.588360 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:13Z","lastTransitionTime":"2025-12-04T15:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.690645 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.690684 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.690695 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.690712 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.690725 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:13Z","lastTransitionTime":"2025-12-04T15:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.792640 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.792672 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.792681 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.792695 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.792716 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:13Z","lastTransitionTime":"2025-12-04T15:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.895558 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.895601 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.895612 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.895629 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.895641 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:13Z","lastTransitionTime":"2025-12-04T15:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.998897 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.998955 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.998973 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.998998 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:13 crc kubenswrapper[4946]: I1204 15:04:13.999018 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:13Z","lastTransitionTime":"2025-12-04T15:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.101656 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.101726 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.101743 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.101768 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.101785 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:14Z","lastTransitionTime":"2025-12-04T15:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.204835 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.205242 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.205359 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.205496 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.205587 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:14Z","lastTransitionTime":"2025-12-04T15:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.308591 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.309039 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.309295 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.309430 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.309532 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:14Z","lastTransitionTime":"2025-12-04T15:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.412371 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.412811 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.412912 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.413024 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.413147 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:14Z","lastTransitionTime":"2025-12-04T15:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.452778 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.452823 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.452860 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:14 crc kubenswrapper[4946]: E1204 15:04:14.452944 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:14 crc kubenswrapper[4946]: E1204 15:04:14.453086 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:14 crc kubenswrapper[4946]: E1204 15:04:14.453212 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.515943 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.516338 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.516467 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.516602 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.516732 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:14Z","lastTransitionTime":"2025-12-04T15:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.620049 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.620102 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.620132 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.620151 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.620164 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:14Z","lastTransitionTime":"2025-12-04T15:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.723754 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.723791 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.723799 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.723813 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.723822 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:14Z","lastTransitionTime":"2025-12-04T15:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.826068 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.826146 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.826157 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.826172 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.826183 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:14Z","lastTransitionTime":"2025-12-04T15:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.928847 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.928928 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.928942 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.928965 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:14 crc kubenswrapper[4946]: I1204 15:04:14.928981 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:14Z","lastTransitionTime":"2025-12-04T15:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.031523 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.031587 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.031599 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.031619 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.031634 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:15Z","lastTransitionTime":"2025-12-04T15:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.134677 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.134742 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.134757 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.134779 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.134793 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:15Z","lastTransitionTime":"2025-12-04T15:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.237850 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.237928 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.237947 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.237976 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.237995 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:15Z","lastTransitionTime":"2025-12-04T15:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.341566 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.341687 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.341712 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.341741 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.341764 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:15Z","lastTransitionTime":"2025-12-04T15:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.444964 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.445040 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.445057 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.445088 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.445106 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:15Z","lastTransitionTime":"2025-12-04T15:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.452405 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:15 crc kubenswrapper[4946]: E1204 15:04:15.452591 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.548447 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.548519 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.548541 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.548569 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.548592 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:15Z","lastTransitionTime":"2025-12-04T15:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.652510 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.652599 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.652621 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.652651 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.652670 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:15Z","lastTransitionTime":"2025-12-04T15:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.756080 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.756188 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.756210 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.756237 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.756258 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:15Z","lastTransitionTime":"2025-12-04T15:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.859276 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.859335 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.859345 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.859363 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.859374 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:15Z","lastTransitionTime":"2025-12-04T15:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.961589 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.961646 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.961658 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.961680 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:15 crc kubenswrapper[4946]: I1204 15:04:15.961694 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:15Z","lastTransitionTime":"2025-12-04T15:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.064450 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.064515 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.064528 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.064548 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.064561 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:16Z","lastTransitionTime":"2025-12-04T15:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.167472 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.167518 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.167528 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.167545 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.167558 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:16Z","lastTransitionTime":"2025-12-04T15:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.270560 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.270602 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.270610 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.270626 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.270635 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:16Z","lastTransitionTime":"2025-12-04T15:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.372586 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.372652 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.372672 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.372696 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.372716 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:16Z","lastTransitionTime":"2025-12-04T15:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.451753 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:16 crc kubenswrapper[4946]: E1204 15:04:16.451946 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.452321 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.452402 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:16 crc kubenswrapper[4946]: E1204 15:04:16.452568 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:16 crc kubenswrapper[4946]: E1204 15:04:16.452974 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.475650 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.475685 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.475696 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.475717 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.475733 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:16Z","lastTransitionTime":"2025-12-04T15:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.578635 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.578956 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.579046 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.579196 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.579298 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:16Z","lastTransitionTime":"2025-12-04T15:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.682780 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.682851 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.682865 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.682889 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.682904 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:16Z","lastTransitionTime":"2025-12-04T15:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.786669 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.786715 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.786727 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.786747 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.786763 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:16Z","lastTransitionTime":"2025-12-04T15:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.890054 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.890103 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.890134 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.890158 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.890169 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:16Z","lastTransitionTime":"2025-12-04T15:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.993069 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.993185 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.993201 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.993227 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:16 crc kubenswrapper[4946]: I1204 15:04:16.993241 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:16Z","lastTransitionTime":"2025-12-04T15:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.096428 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.096485 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.096506 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.096529 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.096549 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:17Z","lastTransitionTime":"2025-12-04T15:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.199264 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.199325 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.199341 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.199381 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.199396 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:17Z","lastTransitionTime":"2025-12-04T15:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.302166 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.302659 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.302765 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.302843 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.302903 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:17Z","lastTransitionTime":"2025-12-04T15:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.405879 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.405929 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.405939 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.405955 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.405967 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:17Z","lastTransitionTime":"2025-12-04T15:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.452154 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:17 crc kubenswrapper[4946]: E1204 15:04:17.452316 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.508282 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.508342 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.508363 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.508404 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.508417 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:17Z","lastTransitionTime":"2025-12-04T15:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.616784 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.616876 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.616893 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.616916 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.616929 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:17Z","lastTransitionTime":"2025-12-04T15:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.720192 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.720243 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.720253 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.720272 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.720286 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:17Z","lastTransitionTime":"2025-12-04T15:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.823697 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.823765 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.823778 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.823798 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.823809 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:17Z","lastTransitionTime":"2025-12-04T15:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.927084 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.927150 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.927160 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.927183 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:17 crc kubenswrapper[4946]: I1204 15:04:17.927192 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:17Z","lastTransitionTime":"2025-12-04T15:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.030385 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.030450 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.030464 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.030487 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.030507 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:18Z","lastTransitionTime":"2025-12-04T15:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.133407 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.133459 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.133474 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.133497 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.133510 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:18Z","lastTransitionTime":"2025-12-04T15:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.192949 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.193002 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.193017 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.193035 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.193046 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:18Z","lastTransitionTime":"2025-12-04T15:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:18 crc kubenswrapper[4946]: E1204 15:04:18.207589 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:18Z is after 2025-08-24T17:21:41Z"
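The status patch never reaches the API server's storage path: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 serves a certificate whose notAfter (2025-08-24T17:21:41Z) is months behind the node clock (2025-12-04T15:04:18Z), so every node-status update fails identically and kubelet retries. A minimal sketch to confirm the expiry from the node, assuming the address taken from the failed POST above, the third-party cryptography package (version 42+ for the *_utc accessors), and that the server completes a handshake without demanding a client certificate:

import ssl
from datetime import datetime, timezone

from cryptography import x509  # third-party: pip install cryptography

HOST, PORT = "127.0.0.1", 9743  # endpoint from the failed webhook POST

# Fetch the serving certificate without verifying it (verification is exactly
# what fails in the log), then compare its validity window to the current time.
pem = ssl.get_server_certificate((HOST, PORT))
cert = x509.load_pem_x509_certificate(pem.encode())

now = datetime.now(timezone.utc)
print("notBefore:", cert.not_valid_before_utc)
print("notAfter: ", cert.not_valid_after_utc)
print("expired:  ", now > cert.not_valid_after_utc)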
event="NodeHasNoDiskPressure" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.212007 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.212025 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.212034 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:18Z","lastTransitionTime":"2025-12-04T15:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:18 crc kubenswrapper[4946]: E1204 15:04:18.227942 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:18Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.232453 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.232501 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.232513 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.232535 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.232549 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:18Z","lastTransitionTime":"2025-12-04T15:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:18 crc kubenswrapper[4946]: E1204 15:04:18.246026 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:18Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.251095 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.251169 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.251181 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.251207 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.251221 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:18Z","lastTransitionTime":"2025-12-04T15:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:18 crc kubenswrapper[4946]: E1204 15:04:18.264404 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:18Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.269564 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.272165 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.273808 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.273858 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.273878 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:18Z","lastTransitionTime":"2025-12-04T15:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:18 crc kubenswrapper[4946]: E1204 15:04:18.287976 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T15:04:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8fd3dcd4-d2c1-4220-bf1e-9a9ba8de07e7\\\",\\\"systemUUID\\\":\\\"0d5ec34d-e409-4ecd-b977-fe4455c38295\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:18Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:18 crc kubenswrapper[4946]: E1204 15:04:18.288477 4946 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.290457 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.290489 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.290499 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.290514 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.290525 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:18Z","lastTransitionTime":"2025-12-04T15:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.393597 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.393667 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.393688 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.393715 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.393733 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:18Z","lastTransitionTime":"2025-12-04T15:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.452599 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.452685 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.452632 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:18 crc kubenswrapper[4946]: E1204 15:04:18.452797 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:18 crc kubenswrapper[4946]: E1204 15:04:18.452948 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:18 crc kubenswrapper[4946]: E1204 15:04:18.453073 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.497825 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.497868 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.497881 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.497898 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.497909 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:18Z","lastTransitionTime":"2025-12-04T15:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.600573 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.600619 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.600631 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.600648 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.600663 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:18Z","lastTransitionTime":"2025-12-04T15:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.703766 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.703815 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.703836 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.703857 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.703870 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:18Z","lastTransitionTime":"2025-12-04T15:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.807198 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.807259 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.807269 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.807307 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.807319 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:18Z","lastTransitionTime":"2025-12-04T15:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.910252 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.910301 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.910313 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.910333 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:18 crc kubenswrapper[4946]: I1204 15:04:18.910344 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:18Z","lastTransitionTime":"2025-12-04T15:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.012925 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.012979 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.012987 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.013002 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.013011 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:19Z","lastTransitionTime":"2025-12-04T15:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.115435 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.115479 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.115491 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.115510 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.115524 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:19Z","lastTransitionTime":"2025-12-04T15:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.218274 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.218319 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.218331 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.218348 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.218359 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:19Z","lastTransitionTime":"2025-12-04T15:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.321547 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.321594 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.321605 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.321625 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.321640 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:19Z","lastTransitionTime":"2025-12-04T15:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.425028 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.425095 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.425162 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.425197 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.425219 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:19Z","lastTransitionTime":"2025-12-04T15:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.451928 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:19 crc kubenswrapper[4946]: E1204 15:04:19.452209 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.467755 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd6b836-f508-4a4d-8981-b4d8371aba43\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42aa2de3e954b29b1d06dbfa70cfce3fd4ab67429abbbd4ec07aac4284f32e36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d188cd5f6e35af6fe300e8761b2d502b44b37c0b2edc2f09754bf85fa67f0d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d188cd5f6e35af6fe300e8761b2d502b44b37c0b2edc2f09754bf85fa67f0d57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 2025-08-24T17:21:41Z" Dec 04 
15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.486084 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dc1295be9c3bd79d462d2a05f5a8f6436b8befed12c5a3f411b16f3771b04f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.502079 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fjmh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://482f45a4b06addcfe6d528f3e9cb8e4d27938059721fc7a063bb0deb4dd29d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:40Z\\\",\\\"message\\\":\\\"2025-12-04T15:02:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_96487a8a-6292-44e9-b01b-0c12615a547c\\\\n2025-12-04T15:02:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_96487a8a-6292-44e9-b01b-0c12615a547c to /host/opt/cni/bin/\\\\n2025-12-04T15:02:55Z [verbose] multus-daemon started\\\\n2025-12-04T15:02:55Z [verbose] Readiness Indicator file check\\\\n2025-12-04T15:03:40Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kbs5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fjmh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.525411 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T15:03:54Z\\\",\\\"message\\\":\\\")\\\\nI1204 15:03:54.792043 7020 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager-operator/metrics]} name:Service_openshift-controller-manager-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.58:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4607c9b7-15f9-4ba0-86e5-0021ba7e4488}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 
15:03:54.792099 7020 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-fjmh5\\\\nI1204 15:03:54.791956 7020 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-jdjs9\\\\nI1204 15:03:54.792168 7020 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-jdjs9 in node crc\\\\nI1204 15:03:54.792176 7020 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-jdjs9 after 0 failed attempt(s)\\\\nI1204 15:03:54.792182 7020 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-jdjs9\\\\nI1204 15:03:54.792012 7020 obj_retry.go:303] R\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8fd42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-w598m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.527924 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.527973 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.527986 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.528006 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.528021 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:19Z","lastTransitionTime":"2025-12-04T15:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.539521 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebe89772-ac8c-413e-93a0-3e230b3746da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e413ddd521d200695659ca37fbaa7516d9a5c658643dd6f03646cdbe3108edef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cee2731ef02ecb95e5c91995b2c3f3df2ea00b9226145ce0ed5b41d0d87ed52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8x9kt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-k58zs\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.554916 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a3cccbb-17c2-487d-a952-6b5d50656e2a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6hcl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:03:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9xbtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 
2025-08-24T17:21:41Z" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.578176 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf347f23-9d92-45c6-a45b-8fcc90ac08c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38611bce45609aecabae35e50df1ce29eb0b781cee771df33346effa66abfbcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9572530c7e88d9c7e687dfdb8aa78681aae7216645c9312212290e492d83e807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://918c2134164eceb0ba967ebd0159ce5c3d84a1f4e6b56dd9d97a2fb1736e421e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/
etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://633b16af3f2dc44b943e36316eb8457af8cdf8e9d263b36974fb11155c8de93c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a316f010d82bc045ac0d264da0be1d699823bec96a2306effb752fac7dfdd24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5750447cd1d744a3c5125ac9cefa0c28729ffbab36f5656d859e784f33259c2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5750447cd1d744a3c5125ac9cefa0c28729ffbab36f5656d859e784f33259c2c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee6c95868bfa5a0d4d2d388fc654ce7ffa793da975943c6fefb06e03cf6c60b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee6c95868bfa5a0d4d2d388fc654ce7ffa793da975943c6fefb06e03cf6c60b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025
-12-04T15:02:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:31Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://adc53226b1d284cc01ce63df1ea5745393d749eba5486cbf3a848be8b60d2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adc53226b1d284cc01ce63df1ea5745393d749eba5486cbf3a848be8b60d2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.594291 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.607844 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.618752 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s76w5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0fa893-f2f4-4864-a154-fb91f3dc76f0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc32c02464b5e0fc3f7baf9c16faca8e09bf096d2e8008eab8120c5399659f24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bl4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s76w5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.628473 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f47d6bc-3d05-4c97-902f-5714244b2a1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3ebc2e51a8583ba9af309bad2349de966a52c2d39297072ac52dd51619cc44f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt75h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-qhv79\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.631830 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.631864 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.631876 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.632101 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.632112 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:19Z","lastTransitionTime":"2025-12-04T15:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.640637 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jdjs9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a85cf818-0fdc-4438-8357-38be4a980937\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7655de7bcfdfcaee28c18f3814f7f1e82aed75b93b157aa383fb2617353a1d90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9ktl\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:53Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jdjs9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.652470 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da258083b546bad028a416ef87f23b9075efe1f740a8751023bfcff3d4989f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.665424 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534e3524-8e4b-474e-b14e-2da113cec158\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72f785427f140a13096691bdc220a2aeac96610843ffc67515fb6a452981b4c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16a2592e5f911a6ed0143381d51a45c78969c1055b08bf710e25b2186e4bc88d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fb42a847a66a38280125dfb2451ed875276101a37a2fefe4f74478806afc11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08870a5c59f66b003e8bdd54769a2fa3b534341a1c0fc82d4e0c3f8d70a10724\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87aa112fb72e9832e5dc69d9e8e5d72b4dfd653d6e322278465a48d45e2acd1a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0831cf4130c749723f522b3704976441fa21c06a59b1d7abdca6f8aaf9097cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7a5a194300c24c39455398cce2d990a5beb0929e285b2a862b0457028a47a5a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:03:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:03:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c7hh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fzjk8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.678518 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7f0724-c253-4acc-8909-ca111112af4e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-04T15:02:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1204 15:02:43.674492 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1204 15:02:43.675401 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4174515253/tls.crt::/tmp/serving-cert-4174515253/tls.key\\\\\\\"\\\\nI1204 15:02:49.369084 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1204 15:02:49.372322 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1204 15:02:49.372343 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1204 15:02:49.372369 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1204 15:02:49.372374 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1204 15:02:49.380068 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1204 15:02:49.380674 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380717 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1204 15:02:49.380746 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1204 15:02:49.380775 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1204 15:02:49.380804 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1204 15:02:49.380850 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1204 15:02:49.381285 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1204 15:02:49.382598 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.690922 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdb13d38-e5f8-45b7-aebb-0dac3d760972\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3d0c7af257df4c2fa9786be5ae1400336328b248f35976bdd68de5d4cefee4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://652d9b0796994da50a0f94f455e71c7bdab9c75a3078b4ae14092d04fd1b7df3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dab1c5d9ad7b0df3b860405d20f348eabe0a65587fc9b74b3d3a79e95672b69b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.701424 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ea9dbda-9794-4f46-b470-bcd55e5e2dd3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:03:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba178e72ed627de57f14e2824e6f3df502d1381b5f3cfa499956cbb491913244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://487b17c1161cd918a231631762628de98ccd97dcfa9bdeeb371e5a7b75ebd541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e555c36c9745da9c2a6c943ed8ed26354d90ca29c4760317dafd74573ffbe32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a3816fd6d882642d3e35c2d938817e1f674ce23717b59782538abb2243289c4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T15:02:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T15:02:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T15:02:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.712991 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.724163 4946 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T15:02:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d859871bfcbe2a2dcdc14dcb0dd1db8e543dd86259eb6ef2638f0d6f16e575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd3f9edab11c2c0a79fc09637f5ec9987cf8f7b420ed9c25ceafe7ea1d378245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T15:02:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T15:04:19Z is after 2025-08-24T17:21:41Z" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.734276 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.734315 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.734329 4946 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.734348 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:19 crc kubenswrapper[4946]: I1204 15:04:19.734361 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:19Z","lastTransitionTime":"2025-12-04T15:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.059187 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.059222 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.059231 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.059247 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.059256 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:20Z","lastTransitionTime":"2025-12-04T15:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.162690 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.162764 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.162784 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.162815 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.162836 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:20Z","lastTransitionTime":"2025-12-04T15:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.265818 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.265865 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.265875 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.265895 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.265906 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:20Z","lastTransitionTime":"2025-12-04T15:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.368598 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.368641 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.368652 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.368670 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.368680 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:20Z","lastTransitionTime":"2025-12-04T15:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.452330 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.452330 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.452334 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:20 crc kubenswrapper[4946]: E1204 15:04:20.452989 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:20 crc kubenswrapper[4946]: E1204 15:04:20.453074 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.453148 4946 scope.go:117] "RemoveContainer" containerID="0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1" Dec 04 15:04:20 crc kubenswrapper[4946]: E1204 15:04:20.453319 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:20 crc kubenswrapper[4946]: E1204 15:04:20.453497 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.471973 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.472182 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.472196 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.472217 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.472229 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:20Z","lastTransitionTime":"2025-12-04T15:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.575166 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.575208 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.575217 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.575233 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.575244 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:20Z","lastTransitionTime":"2025-12-04T15:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.678252 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.678316 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.678330 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.678352 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.678369 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:20Z","lastTransitionTime":"2025-12-04T15:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.781075 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.781157 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.781178 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.781204 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.781223 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:20Z","lastTransitionTime":"2025-12-04T15:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.883895 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.883957 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.883969 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.883988 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.883999 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:20Z","lastTransitionTime":"2025-12-04T15:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.987591 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.987640 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.987650 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.987671 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:20 crc kubenswrapper[4946]: I1204 15:04:20.987686 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:20Z","lastTransitionTime":"2025-12-04T15:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.090755 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.090893 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.090934 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.090969 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.091012 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:21Z","lastTransitionTime":"2025-12-04T15:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.194249 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.194337 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.194352 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.194370 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.194383 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:21Z","lastTransitionTime":"2025-12-04T15:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.297023 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.297102 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.297139 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.297163 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.297175 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:21Z","lastTransitionTime":"2025-12-04T15:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.400173 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.400314 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.400336 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.400362 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.400383 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:21Z","lastTransitionTime":"2025-12-04T15:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.452724 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:21 crc kubenswrapper[4946]: E1204 15:04:21.452986 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.502766 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.502854 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.502868 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.502887 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.502903 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:21Z","lastTransitionTime":"2025-12-04T15:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.606813 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.606906 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.606926 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.606956 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.606990 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:21Z","lastTransitionTime":"2025-12-04T15:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.710985 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.711040 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.711054 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.711076 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.711091 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:21Z","lastTransitionTime":"2025-12-04T15:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.814618 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.814695 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.814729 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.814752 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.814766 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:21Z","lastTransitionTime":"2025-12-04T15:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.918243 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.918619 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.918649 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.918719 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:21 crc kubenswrapper[4946]: I1204 15:04:21.918738 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:21Z","lastTransitionTime":"2025-12-04T15:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.021311 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.021358 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.021370 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.021390 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.021404 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:22Z","lastTransitionTime":"2025-12-04T15:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.124162 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.124223 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.124240 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.124262 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.124278 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:22Z","lastTransitionTime":"2025-12-04T15:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.227526 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.227577 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.227587 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.227605 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.227621 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:22Z","lastTransitionTime":"2025-12-04T15:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.330460 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.330540 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.330550 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.330564 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.330578 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:22Z","lastTransitionTime":"2025-12-04T15:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.433107 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.433165 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.433178 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.433194 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.433205 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:22Z","lastTransitionTime":"2025-12-04T15:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.451748 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.451786 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.451855 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:22 crc kubenswrapper[4946]: E1204 15:04:22.451929 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:22 crc kubenswrapper[4946]: E1204 15:04:22.452045 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:22 crc kubenswrapper[4946]: E1204 15:04:22.452105 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.535599 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.535651 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.535661 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.535679 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.535691 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:22Z","lastTransitionTime":"2025-12-04T15:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.638848 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.638899 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.638912 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.638932 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.638947 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:22Z","lastTransitionTime":"2025-12-04T15:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.741480 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.741526 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.741540 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.741560 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.741573 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:22Z","lastTransitionTime":"2025-12-04T15:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.845030 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.845145 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.845174 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.845204 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.845229 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:22Z","lastTransitionTime":"2025-12-04T15:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.948560 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.948640 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.948661 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.948687 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:22 crc kubenswrapper[4946]: I1204 15:04:22.948707 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:22Z","lastTransitionTime":"2025-12-04T15:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.052101 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.052203 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.052222 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.052245 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.052263 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:23Z","lastTransitionTime":"2025-12-04T15:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.155943 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.156009 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.156022 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.156052 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.156066 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:23Z","lastTransitionTime":"2025-12-04T15:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.258332 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.258393 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.258403 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.258421 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.258433 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:23Z","lastTransitionTime":"2025-12-04T15:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.361098 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.361212 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.361230 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.361255 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.361272 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:23Z","lastTransitionTime":"2025-12-04T15:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.452253 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:23 crc kubenswrapper[4946]: E1204 15:04:23.452405 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.464159 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.464204 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.464214 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.464233 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.464247 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:23Z","lastTransitionTime":"2025-12-04T15:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.567939 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.568011 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.568027 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.568047 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:23 crc kubenswrapper[4946]: I1204 15:04:23.568059 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:23Z","lastTransitionTime":"2025-12-04T15:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[node-status group (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, "Node became not ready") repeated verbatim 8 more times between 15:04:23.672712 and 15:04:24.395691; only the timestamps differ, duplicates condensed]
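The cycle above repeats because the kubelet holds the node's Ready condition at False until the runtime reports NetworkReady=true, which requires at least one CNI config file under /etc/kubernetes/cni/net.d/. On this cluster that file is delivered by the multus/OVN pods once they come up; purely as an illustration of what the check is looking for, here is a minimal sketch (hypothetical file name and loopback-only contents; the standard CNI conflist format is assumed, this is not the cluster's real fix) that would make the directory non-empty:

// cni_stub.go - minimal sketch, NOT the cluster's real fix: the kubelet
// clears NetworkReady only once a CNI config appears in
// /etc/kubernetes/cni/net.d/. File name and contents are illustrative
// assumptions based on the standard CNI conflist format.
package main

import (
	"log"
	"os"
	"path/filepath"
)

const conflist = `{
  "cniVersion": "0.4.0",
  "name": "loopback-only",
  "plugins": [
    { "type": "loopback" }
  ]
}`

func main() {
	dir := "/etc/kubernetes/cni/net.d"
	if err := os.MkdirAll(dir, 0o755); err != nil {
		log.Fatal(err)
	}
	// The 99- prefix keeps this sorted after any real network config.
	path := filepath.Join(dir, "99-loopback.conflist")
	if err := os.WriteFile(path, []byte(conflist), 0o644); err != nil {
		log.Fatal(err)
	}
	log.Printf("wrote %s", path)
}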
Dec 04 15:04:24 crc kubenswrapper[4946]: I1204 15:04:24.451950 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:24 crc kubenswrapper[4946]: I1204 15:04:24.452009 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:24 crc kubenswrapper[4946]: I1204 15:04:24.452054 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:24 crc kubenswrapper[4946]: E1204 15:04:24.452230 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:24 crc kubenswrapper[4946]: E1204 15:04:24.452410 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:24 crc kubenswrapper[4946]: E1204 15:04:24.452547 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:24 crc kubenswrapper[4946]: I1204 15:04:24.499414 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:24 crc kubenswrapper[4946]: I1204 15:04:24.499492 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:24 crc kubenswrapper[4946]: I1204 15:04:24.499509 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:24 crc kubenswrapper[4946]: I1204 15:04:24.499529 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:24 crc kubenswrapper[4946]: I1204 15:04:24.499568 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:24Z","lastTransitionTime":"2025-12-04T15:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
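The "network is not ready" errors above are the kubelet relaying a condition it polls from the container runtime over CRI: the runtime's Status RPC returns a NetworkReady condition, and CRI-O reports it false while /etc/kubernetes/cni/net.d/ is empty. A sketch of that query, assuming the default CRI-O socket path on this host and the published k8s.io/cri-api bindings:

// cri_status.go - sketch of where the NetworkReady=false in these entries
// comes from: the kubelet polls the runtime's Status RPC over CRI and
// mirrors the NetworkReady condition into the node's Ready condition.
// The socket path is the CRI-O default and an assumption for this host.
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
	runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
)

func main() {
	conn, err := grpc.Dial("unix:///var/run/crio/crio.sock",
		grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	resp, err := runtimeapi.NewRuntimeServiceClient(conn).
		Status(ctx, &runtimeapi.StatusRequest{})
	if err != nil {
		log.Fatal(err)
	}
	for _, c := range resp.Status.Conditions {
		// With no file in /etc/kubernetes/cni/net.d/ this should print
		// NetworkReady=false reason=NetworkPluginNotReady, matching the log.
		fmt.Printf("%s=%v reason=%s message=%s\n",
			c.Type, c.Status, c.Reason, c.Message)
	}
}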
[node-status group repeated verbatim 9 more times between 15:04:24.602200 and 15:04:25.430679; only the timestamps differ, duplicates condensed]
Dec 04 15:04:25 crc kubenswrapper[4946]: I1204 15:04:25.452156 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:25 crc kubenswrapper[4946]: E1204 15:04:25.452369 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a"
[node-status group repeated verbatim 7 more times between 15:04:25.534574 and 15:04:26.156724; only the timestamps differ, duplicates condensed]
Dec 04 15:04:26 crc kubenswrapper[4946]: I1204 15:04:26.259429 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:26 crc kubenswrapper[4946]: I1204 15:04:26.259476 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:26 crc kubenswrapper[4946]: I1204 15:04:26.259488 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:26 crc kubenswrapper[4946]: I1204 15:04:26.259506 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:26 crc kubenswrapper[4946]: I1204 15:04:26.259520 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:26Z","lastTransitionTime":"2025-12-04T15:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:26 crc kubenswrapper[4946]: I1204 15:04:26.362187 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:26 crc kubenswrapper[4946]: I1204 15:04:26.362233 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:26 crc kubenswrapper[4946]: I1204 15:04:26.362250 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:26 crc kubenswrapper[4946]: I1204 15:04:26.362272 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:26 crc kubenswrapper[4946]: I1204 15:04:26.362287 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:26Z","lastTransitionTime":"2025-12-04T15:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:26 crc kubenswrapper[4946]: I1204 15:04:26.451740 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:26 crc kubenswrapper[4946]: I1204 15:04:26.451864 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:26 crc kubenswrapper[4946]: I1204 15:04:26.451895 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:26 crc kubenswrapper[4946]: E1204 15:04:26.452000 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:26 crc kubenswrapper[4946]: E1204 15:04:26.452158 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:26 crc kubenswrapper[4946]: E1204 15:04:26.452197 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[node-status group repeated verbatim 8 more times between 15:04:26.465001 and 15:04:27.190441; only the timestamps differ, duplicates condensed]
Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.192196 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-fjmh5_f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09/kube-multus/1.log" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.193140 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-fjmh5_f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09/kube-multus/0.log" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.193202 4946 generic.go:334] "Generic (PLEG): container finished" podID="f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09" containerID="482f45a4b06addcfe6d528f3e9cb8e4d27938059721fc7a063bb0deb4dd29d96" exitCode=1 Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.193243 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-fjmh5" event={"ID":"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09","Type":"ContainerDied","Data":"482f45a4b06addcfe6d528f3e9cb8e4d27938059721fc7a063bb0deb4dd29d96"} Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.193291 4946 scope.go:117] "RemoveContainer" containerID="c5f5997db3d64f3a1f4ee9c1a68c39b8b93fef76a35c46910bdd636fee89b43f" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.194363 4946 scope.go:117] "RemoveContainer" containerID="482f45a4b06addcfe6d528f3e9cb8e4d27938059721fc7a063bb0deb4dd29d96" Dec 04 15:04:27 crc kubenswrapper[4946]: E1204 15:04:27.194898 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-fjmh5_openshift-multus(f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09)\"" pod="openshift-multus/multus-fjmh5" podUID="f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.241311 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=98.241281522 podStartE2EDuration="1m38.241281522s" podCreationTimestamp="2025-12-04 15:02:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:04:27.241263162 +0000 UTC m=+118.127306803" watchObservedRunningTime="2025-12-04 15:04:27.241281522 +0000 UTC m=+118.127325163" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.260585 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=96.260559592 podStartE2EDuration="1m36.260559592s" podCreationTimestamp="2025-12-04 15:02:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:04:27.260088628 +0000 UTC m=+118.146132269" watchObservedRunningTime="2025-12-04 15:04:27.260559592 +0000 UTC m=+118.146603233" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.290808 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=67.290773976 podStartE2EDuration="1m7.290773976s" podCreationTimestamp="2025-12-04 15:03:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:04:27.276052166 +0000 UTC m=+118.162095807" watchObservedRunningTime="2025-12-04 15:04:27.290773976 +0000 UTC m=+118.176817617"
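The kube-multus container above exited with code 1 and the kubelet refused an immediate restart: "back-off 10s restarting failed container" is CrashLoopBackOff. The restart delay starts at 10s and doubles on each consecutive crash up to a 5m cap (the long-standing kubelet defaults, assumed here rather than read from this build); a sketch of the resulting schedule:

// backoff.go - sketch of the CrashLoopBackOff schedule implied by the
// "back-off 10s restarting failed container" entry above: the delay starts
// at 10s and doubles per consecutive crash up to a 5m cap (assumed kubelet
// defaults, not read from this host).
package main

import (
	"fmt"
	"time"
)

func main() {
	const (
		base = 10 * time.Second
		max  = 5 * time.Minute
	)
	delay := base
	for crash := 1; crash <= 7; crash++ {
		fmt.Printf("crash %d -> wait %s before restart\n", crash, delay)
		if delay *= 2; delay > max {
			delay = max
		}
	}
	// Prints 10s, 20s, 40s, 1m20s, 2m40s, then holds at 5m0s.
}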
Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.292882 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.292947 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.292959 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.292985 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.292996 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:27Z","lastTransitionTime":"2025-12-04T15:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.328009 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=29.327985734 podStartE2EDuration="29.327985734s" podCreationTimestamp="2025-12-04 15:03:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:04:27.312616094 +0000 UTC m=+118.198659735" watchObservedRunningTime="2025-12-04 15:04:27.327985734 +0000 UTC m=+118.214029375" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.395772 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.395813 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.395824 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.395839 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.395850 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:27Z","lastTransitionTime":"2025-12-04T15:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.403594 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podStartSLOduration=97.403568586 podStartE2EDuration="1m37.403568586s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:04:27.390526408 +0000 UTC m=+118.276570069" watchObservedRunningTime="2025-12-04 15:04:27.403568586 +0000 UTC m=+118.289612227" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.404009 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-jdjs9" podStartSLOduration=97.40400373 podStartE2EDuration="1m37.40400373s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:04:27.403380061 +0000 UTC m=+118.289423712" watchObservedRunningTime="2025-12-04 15:04:27.40400373 +0000 UTC m=+118.290047371" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.420577 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-k58zs" podStartSLOduration=97.420549986 podStartE2EDuration="1m37.420549986s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:04:27.420373641 +0000 UTC m=+118.306417282" watchObservedRunningTime="2025-12-04 15:04:27.420549986 +0000 UTC m=+118.306593627" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.452835 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:27 crc kubenswrapper[4946]: E1204 15:04:27.453148 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.464489 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=16.464459329 podStartE2EDuration="16.464459329s" podCreationTimestamp="2025-12-04 15:04:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:04:27.463470079 +0000 UTC m=+118.349513720" watchObservedRunningTime="2025-12-04 15:04:27.464459329 +0000 UTC m=+118.350502970" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.498240 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.498328 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.498341 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.498368 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.498383 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:27Z","lastTransitionTime":"2025-12-04T15:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.515758 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-s76w5" podStartSLOduration=97.515727857 podStartE2EDuration="1m37.515727857s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:04:27.515085148 +0000 UTC m=+118.401128809" watchObservedRunningTime="2025-12-04 15:04:27.515727857 +0000 UTC m=+118.401771508" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.552146 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-fzjk8" podStartSLOduration=97.5521051 podStartE2EDuration="1m37.5521051s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:04:27.551663046 +0000 UTC m=+118.437706687" watchObservedRunningTime="2025-12-04 15:04:27.5521051 +0000 UTC m=+118.438148741" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.600865 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.600922 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.600933 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.600949 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.600963 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:27Z","lastTransitionTime":"2025-12-04T15:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.704298 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.704355 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.704367 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.704384 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:27 crc kubenswrapper[4946]: I1204 15:04:27.704398 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:27Z","lastTransitionTime":"2025-12-04T15:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
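The pod_startup_latency_tracker entries around here encode a simple relation: with no image pulls (firstStartedPulling/lastFinishedPulling are the zero time), podStartSLOduration is just watchObservedRunningTime minus podCreationTimestamp. For node-resolver-s76w5 that is 15:04:27.515727857 - 15:02:50 = 97.515727857s, matching the logged value. A sketch of the arithmetic:

// startup_slo.go - reproduces the podStartSLOduration arithmetic from the
// "Observed pod startup duration" entries above: with zero pull time the
// SLO duration is watchObservedRunningTime - podCreationTimestamp.
package main

import (
	"fmt"
	"log"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, err := time.Parse(layout, "2025-12-04 15:02:50 +0000 UTC")
	if err != nil {
		log.Fatal(err)
	}
	running, err := time.Parse(layout, "2025-12-04 15:04:27.515727857 +0000 UTC")
	if err != nil {
		log.Fatal(err)
	}
	// Prints 1m37.515727857s, i.e. podStartSLOduration=97.515727857
	// for openshift-dns/node-resolver-s76w5.
	fmt.Println(running.Sub(created))
}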
[node-status group repeated verbatim 2 more times between 15:04:27.808297 and 15:04:27.912499; only the timestamps differ, duplicates condensed]
Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.016136 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.016194 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.016211 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.016233 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.016248 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:28Z","lastTransitionTime":"2025-12-04T15:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.119015 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.119105 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.119150 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.119179 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.119197 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:28Z","lastTransitionTime":"2025-12-04T15:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.199595 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-fjmh5_f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09/kube-multus/1.log" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.221836 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.221874 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.221886 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.221903 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.221916 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:28Z","lastTransitionTime":"2025-12-04T15:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.325405 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.326328 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.326403 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.326428 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.326448 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:28Z","lastTransitionTime":"2025-12-04T15:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.430002 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.430072 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.430087 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.430109 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.430154 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:28Z","lastTransitionTime":"2025-12-04T15:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.452631 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.452752 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.452792 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:28 crc kubenswrapper[4946]: E1204 15:04:28.452868 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:28 crc kubenswrapper[4946]: E1204 15:04:28.452947 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:28 crc kubenswrapper[4946]: E1204 15:04:28.453055 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.533535 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.533611 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.533621 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.533639 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.533650 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:28Z","lastTransitionTime":"2025-12-04T15:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.636995 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.637035 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.637046 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.637065 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.637077 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:28Z","lastTransitionTime":"2025-12-04T15:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.684730 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.684776 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.684795 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.684819 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.684835 4946 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T15:04:28Z","lastTransitionTime":"2025-12-04T15:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.744526 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm"] Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.745253 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.747490 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.747963 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.748524 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.749062 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.866092 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/34c87710-397c-4bb4-b6f8-ccc74b5c0ce6-service-ca\") pod \"cluster-version-operator-5c965bbfc6-vd8wm\" (UID: \"34c87710-397c-4bb4-b6f8-ccc74b5c0ce6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.866657 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/34c87710-397c-4bb4-b6f8-ccc74b5c0ce6-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-vd8wm\" (UID: \"34c87710-397c-4bb4-b6f8-ccc74b5c0ce6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.866975 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34c87710-397c-4bb4-b6f8-ccc74b5c0ce6-serving-cert\") pod 
\"cluster-version-operator-5c965bbfc6-vd8wm\" (UID: \"34c87710-397c-4bb4-b6f8-ccc74b5c0ce6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.867051 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/34c87710-397c-4bb4-b6f8-ccc74b5c0ce6-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-vd8wm\" (UID: \"34c87710-397c-4bb4-b6f8-ccc74b5c0ce6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.867150 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/34c87710-397c-4bb4-b6f8-ccc74b5c0ce6-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-vd8wm\" (UID: \"34c87710-397c-4bb4-b6f8-ccc74b5c0ce6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.968222 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34c87710-397c-4bb4-b6f8-ccc74b5c0ce6-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-vd8wm\" (UID: \"34c87710-397c-4bb4-b6f8-ccc74b5c0ce6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.968640 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/34c87710-397c-4bb4-b6f8-ccc74b5c0ce6-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-vd8wm\" (UID: \"34c87710-397c-4bb4-b6f8-ccc74b5c0ce6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.968766 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/34c87710-397c-4bb4-b6f8-ccc74b5c0ce6-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-vd8wm\" (UID: \"34c87710-397c-4bb4-b6f8-ccc74b5c0ce6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.968886 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/34c87710-397c-4bb4-b6f8-ccc74b5c0ce6-service-ca\") pod \"cluster-version-operator-5c965bbfc6-vd8wm\" (UID: \"34c87710-397c-4bb4-b6f8-ccc74b5c0ce6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.969033 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/34c87710-397c-4bb4-b6f8-ccc74b5c0ce6-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-vd8wm\" (UID: \"34c87710-397c-4bb4-b6f8-ccc74b5c0ce6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.969212 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/34c87710-397c-4bb4-b6f8-ccc74b5c0ce6-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-vd8wm\" (UID: 
\"34c87710-397c-4bb4-b6f8-ccc74b5c0ce6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.969333 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/34c87710-397c-4bb4-b6f8-ccc74b5c0ce6-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-vd8wm\" (UID: \"34c87710-397c-4bb4-b6f8-ccc74b5c0ce6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.970392 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/34c87710-397c-4bb4-b6f8-ccc74b5c0ce6-service-ca\") pod \"cluster-version-operator-5c965bbfc6-vd8wm\" (UID: \"34c87710-397c-4bb4-b6f8-ccc74b5c0ce6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.977421 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34c87710-397c-4bb4-b6f8-ccc74b5c0ce6-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-vd8wm\" (UID: \"34c87710-397c-4bb4-b6f8-ccc74b5c0ce6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" Dec 04 15:04:28 crc kubenswrapper[4946]: I1204 15:04:28.988310 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/34c87710-397c-4bb4-b6f8-ccc74b5c0ce6-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-vd8wm\" (UID: \"34c87710-397c-4bb4-b6f8-ccc74b5c0ce6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" Dec 04 15:04:29 crc kubenswrapper[4946]: I1204 15:04:29.061371 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" Dec 04 15:04:29 crc kubenswrapper[4946]: I1204 15:04:29.208463 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" event={"ID":"34c87710-397c-4bb4-b6f8-ccc74b5c0ce6","Type":"ContainerStarted","Data":"ddbe09e513bd6a6eddfb59893fab7c301c9f5b7a1483e6e44fb31fdd6f1da5f3"} Dec 04 15:04:29 crc kubenswrapper[4946]: I1204 15:04:29.452535 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:29 crc kubenswrapper[4946]: E1204 15:04:29.453739 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:29 crc kubenswrapper[4946]: E1204 15:04:29.467852 4946 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 04 15:04:30 crc kubenswrapper[4946]: E1204 15:04:30.062946 4946 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 04 15:04:30 crc kubenswrapper[4946]: I1204 15:04:30.214775 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" event={"ID":"34c87710-397c-4bb4-b6f8-ccc74b5c0ce6","Type":"ContainerStarted","Data":"1752f15a77f9c30993cdf216df6aabcde3dbbab70844c7c5759657a4de74161a"} Dec 04 15:04:30 crc kubenswrapper[4946]: I1204 15:04:30.235576 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-vd8wm" podStartSLOduration=100.235551811 podStartE2EDuration="1m40.235551811s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:04:30.235253342 +0000 UTC m=+121.121297003" watchObservedRunningTime="2025-12-04 15:04:30.235551811 +0000 UTC m=+121.121595452" Dec 04 15:04:30 crc kubenswrapper[4946]: I1204 15:04:30.451934 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:30 crc kubenswrapper[4946]: I1204 15:04:30.452004 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:30 crc kubenswrapper[4946]: I1204 15:04:30.451923 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:30 crc kubenswrapper[4946]: E1204 15:04:30.452251 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:30 crc kubenswrapper[4946]: E1204 15:04:30.452147 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:30 crc kubenswrapper[4946]: E1204 15:04:30.452432 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:31 crc kubenswrapper[4946]: I1204 15:04:31.452076 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:31 crc kubenswrapper[4946]: E1204 15:04:31.452314 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:32 crc kubenswrapper[4946]: I1204 15:04:32.451769 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:32 crc kubenswrapper[4946]: I1204 15:04:32.451794 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:32 crc kubenswrapper[4946]: E1204 15:04:32.451943 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:32 crc kubenswrapper[4946]: I1204 15:04:32.451966 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:32 crc kubenswrapper[4946]: E1204 15:04:32.452555 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:32 crc kubenswrapper[4946]: E1204 15:04:32.452774 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:32 crc kubenswrapper[4946]: I1204 15:04:32.453623 4946 scope.go:117] "RemoveContainer" containerID="0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1" Dec 04 15:04:32 crc kubenswrapper[4946]: E1204 15:04:32.453991 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-w598m_openshift-ovn-kubernetes(3537c3df-cdbc-4e1c-aee1-f2d942207a5a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" Dec 04 15:04:33 crc kubenswrapper[4946]: I1204 15:04:33.452385 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:33 crc kubenswrapper[4946]: E1204 15:04:33.453340 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:34 crc kubenswrapper[4946]: I1204 15:04:34.451702 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:34 crc kubenswrapper[4946]: E1204 15:04:34.451874 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:34 crc kubenswrapper[4946]: I1204 15:04:34.451953 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:34 crc kubenswrapper[4946]: E1204 15:04:34.452032 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:34 crc kubenswrapper[4946]: I1204 15:04:34.452259 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:34 crc kubenswrapper[4946]: E1204 15:04:34.452483 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:35 crc kubenswrapper[4946]: E1204 15:04:35.064328 4946 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 15:04:35 crc kubenswrapper[4946]: I1204 15:04:35.452523 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:35 crc kubenswrapper[4946]: E1204 15:04:35.452747 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:36 crc kubenswrapper[4946]: I1204 15:04:36.452659 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:36 crc kubenswrapper[4946]: I1204 15:04:36.452736 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:36 crc kubenswrapper[4946]: E1204 15:04:36.452870 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:36 crc kubenswrapper[4946]: I1204 15:04:36.452758 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:36 crc kubenswrapper[4946]: E1204 15:04:36.452957 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:36 crc kubenswrapper[4946]: E1204 15:04:36.452987 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:37 crc kubenswrapper[4946]: I1204 15:04:37.452560 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:37 crc kubenswrapper[4946]: E1204 15:04:37.452820 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:38 crc kubenswrapper[4946]: I1204 15:04:38.451668 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:38 crc kubenswrapper[4946]: I1204 15:04:38.451742 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:38 crc kubenswrapper[4946]: I1204 15:04:38.451697 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:38 crc kubenswrapper[4946]: E1204 15:04:38.451809 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:38 crc kubenswrapper[4946]: E1204 15:04:38.451902 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:38 crc kubenswrapper[4946]: E1204 15:04:38.451969 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:39 crc kubenswrapper[4946]: I1204 15:04:39.452799 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:39 crc kubenswrapper[4946]: E1204 15:04:39.454246 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:39 crc kubenswrapper[4946]: I1204 15:04:39.454892 4946 scope.go:117] "RemoveContainer" containerID="482f45a4b06addcfe6d528f3e9cb8e4d27938059721fc7a063bb0deb4dd29d96" Dec 04 15:04:40 crc kubenswrapper[4946]: E1204 15:04:40.065616 4946 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 15:04:40 crc kubenswrapper[4946]: I1204 15:04:40.254545 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-fjmh5_f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09/kube-multus/1.log" Dec 04 15:04:40 crc kubenswrapper[4946]: I1204 15:04:40.254631 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-fjmh5" event={"ID":"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09","Type":"ContainerStarted","Data":"04157dee9b66b9c96469e5f0cdf517501a2e2855eb90759879ca3ded4097554c"} Dec 04 15:04:40 crc kubenswrapper[4946]: I1204 15:04:40.276066 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-fjmh5" podStartSLOduration=110.276040932 podStartE2EDuration="1m50.276040932s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:04:40.275800694 +0000 UTC m=+131.161844345" watchObservedRunningTime="2025-12-04 15:04:40.276040932 +0000 UTC m=+131.162084573" Dec 04 15:04:40 crc kubenswrapper[4946]: I1204 15:04:40.452368 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:40 crc kubenswrapper[4946]: I1204 15:04:40.452434 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:40 crc kubenswrapper[4946]: I1204 15:04:40.452368 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:40 crc kubenswrapper[4946]: E1204 15:04:40.452535 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:40 crc kubenswrapper[4946]: E1204 15:04:40.452712 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:40 crc kubenswrapper[4946]: E1204 15:04:40.452990 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:41 crc kubenswrapper[4946]: I1204 15:04:41.451917 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:41 crc kubenswrapper[4946]: E1204 15:04:41.452159 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:42 crc kubenswrapper[4946]: I1204 15:04:42.451849 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:42 crc kubenswrapper[4946]: I1204 15:04:42.451940 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:42 crc kubenswrapper[4946]: I1204 15:04:42.452042 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:42 crc kubenswrapper[4946]: E1204 15:04:42.452048 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:42 crc kubenswrapper[4946]: E1204 15:04:42.452202 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:42 crc kubenswrapper[4946]: E1204 15:04:42.452418 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:43 crc kubenswrapper[4946]: I1204 15:04:43.452781 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:43 crc kubenswrapper[4946]: E1204 15:04:43.453275 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:43 crc kubenswrapper[4946]: I1204 15:04:43.453670 4946 scope.go:117] "RemoveContainer" containerID="0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1" Dec 04 15:04:44 crc kubenswrapper[4946]: I1204 15:04:44.269875 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovnkube-controller/3.log" Dec 04 15:04:44 crc kubenswrapper[4946]: I1204 15:04:44.271900 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/0.log" Dec 04 15:04:44 crc kubenswrapper[4946]: I1204 15:04:44.272643 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerStarted","Data":"7a50aab186604f5678ccff903749ddd74758ee16d496f773128aeb0af53f61bd"} Dec 04 15:04:44 crc kubenswrapper[4946]: I1204 15:04:44.274867 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:04:44 crc kubenswrapper[4946]: I1204 15:04:44.309487 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podStartSLOduration=114.30946522 podStartE2EDuration="1m54.30946522s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:04:44.309346866 +0000 UTC m=+135.195390527" watchObservedRunningTime="2025-12-04 15:04:44.30946522 +0000 UTC m=+135.195508861" Dec 04 15:04:44 crc kubenswrapper[4946]: I1204 15:04:44.331532 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-9xbtr"] Dec 04 15:04:44 crc kubenswrapper[4946]: I1204 15:04:44.331682 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:44 crc kubenswrapper[4946]: E1204 15:04:44.331790 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:44 crc kubenswrapper[4946]: I1204 15:04:44.452654 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:44 crc kubenswrapper[4946]: E1204 15:04:44.452826 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:44 crc kubenswrapper[4946]: I1204 15:04:44.453090 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:44 crc kubenswrapper[4946]: E1204 15:04:44.453180 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:44 crc kubenswrapper[4946]: I1204 15:04:44.453282 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:44 crc kubenswrapper[4946]: E1204 15:04:44.453363 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:45 crc kubenswrapper[4946]: E1204 15:04:45.068297 4946 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 15:04:45 crc kubenswrapper[4946]: I1204 15:04:45.452588 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:45 crc kubenswrapper[4946]: E1204 15:04:45.452816 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:46 crc kubenswrapper[4946]: I1204 15:04:46.451717 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:46 crc kubenswrapper[4946]: I1204 15:04:46.451825 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:46 crc kubenswrapper[4946]: I1204 15:04:46.451849 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:46 crc kubenswrapper[4946]: E1204 15:04:46.451933 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:46 crc kubenswrapper[4946]: E1204 15:04:46.452053 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:46 crc kubenswrapper[4946]: E1204 15:04:46.452236 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:47 crc kubenswrapper[4946]: I1204 15:04:47.452868 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:47 crc kubenswrapper[4946]: E1204 15:04:47.453055 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:48 crc kubenswrapper[4946]: I1204 15:04:48.452279 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:48 crc kubenswrapper[4946]: I1204 15:04:48.452439 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:48 crc kubenswrapper[4946]: I1204 15:04:48.452627 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:48 crc kubenswrapper[4946]: E1204 15:04:48.452615 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 15:04:48 crc kubenswrapper[4946]: E1204 15:04:48.452753 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 15:04:48 crc kubenswrapper[4946]: E1204 15:04:48.452809 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 15:04:49 crc kubenswrapper[4946]: I1204 15:04:49.452911 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:49 crc kubenswrapper[4946]: E1204 15:04:49.454326 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9xbtr" podUID="0a3cccbb-17c2-487d-a952-6b5d50656e2a" Dec 04 15:04:50 crc kubenswrapper[4946]: I1204 15:04:50.452509 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 15:04:50 crc kubenswrapper[4946]: I1204 15:04:50.452646 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:50 crc kubenswrapper[4946]: I1204 15:04:50.453961 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 15:04:50 crc kubenswrapper[4946]: I1204 15:04:50.455083 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 04 15:04:50 crc kubenswrapper[4946]: I1204 15:04:50.455530 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 04 15:04:50 crc kubenswrapper[4946]: I1204 15:04:50.455816 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 04 15:04:50 crc kubenswrapper[4946]: I1204 15:04:50.456287 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 04 15:04:51 crc kubenswrapper[4946]: I1204 15:04:51.452843 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:04:51 crc kubenswrapper[4946]: I1204 15:04:51.455489 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 04 15:04:51 crc kubenswrapper[4946]: I1204 15:04:51.456020 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 04 15:04:52 crc kubenswrapper[4946]: I1204 15:04:52.479105 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:04:52 crc kubenswrapper[4946]: I1204 15:04:52.479509 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:04:57 crc kubenswrapper[4946]: I1204 15:04:57.212957 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:04:57 crc kubenswrapper[4946]: E1204 15:04:57.213180 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:06:59.213144447 +0000 UTC m=+270.099188098 (durationBeforeRetry 2m2s). 
Dec 04 15:04:57 crc kubenswrapper[4946]: I1204 15:04:57.315082 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 15:04:57 crc kubenswrapper[4946]: I1204 15:04:57.315168 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 15:04:57 crc kubenswrapper[4946]: I1204 15:04:57.315201 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 15:04:57 crc kubenswrapper[4946]: I1204 15:04:57.315224 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 15:04:57 crc kubenswrapper[4946]: I1204 15:04:57.316294 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 15:04:57 crc kubenswrapper[4946]: I1204 15:04:57.321268 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 15:04:57 crc kubenswrapper[4946]: I1204 15:04:57.321858 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 15:04:57 crc kubenswrapper[4946]: I1204 15:04:57.322038 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
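The UnmountVolume failure a few records back ("driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers") is retried by the volume manager with exponential backoff; "No retries permitted until ... (durationBeforeRetry 2m2s)" shows the backoff already at its cap. A sketch of that retry schedule, assuming the conventional kubelet constants (an initial delay of 500ms doubling per failure; 2m2s matches the cap observed in this log):

    package main

    import (
    	"fmt"
    	"time"
    )

    // Assumed constants: the kubelet's nested pending operations apply an
    // exponential backoff to failed volume operations; 2m2s matches the
    // durationBeforeRetry seen in the log once the cap is reached.
    const (
    	initialBackoff = 500 * time.Millisecond
    	factor         = 2
    	maxBackoff     = 2*time.Minute + 2*time.Second
    )

    func main() {
    	d := initialBackoff
    	for attempt := 1; attempt <= 10; attempt++ {
    		fmt.Printf("attempt %2d: durationBeforeRetry=%v\n", attempt, d)
    		d *= factor
    		if d > maxBackoff {
    			d = maxBackoff
    		}
    	}
    }

The error itself is transient in the same way: once the hostpath-provisioner driver re-registers with the kubelet's plugin manager, a later retry of TearDown can succeed.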
Dec 04 15:04:57 crc kubenswrapper[4946]: I1204 15:04:57.370565 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 15:04:57 crc kubenswrapper[4946]: I1204 15:04:57.384816 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 15:04:57 crc kubenswrapper[4946]: I1204 15:04:57.395549 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 15:04:57 crc kubenswrapper[4946]: W1204 15:04:57.583072 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-722db8d95901fcc1cf6884f337ce2fd38e150e7c15b4891d71fab5556788910c WatchSource:0}: Error finding container 722db8d95901fcc1cf6884f337ce2fd38e150e7c15b4891d71fab5556788910c: Status 404 returned error can't find the container with id 722db8d95901fcc1cf6884f337ce2fd38e150e7c15b4891d71fab5556788910c
Dec 04 15:04:57 crc kubenswrapper[4946]: W1204 15:04:57.630882 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-6d6325805a189c4cd0f98d8fee325a55eda777c263504f606a169a33ba65fbe5 WatchSource:0}: Error finding container 6d6325805a189c4cd0f98d8fee325a55eda777c263504f606a169a33ba65fbe5: Status 404 returned error can't find the container with id 6d6325805a189c4cd0f98d8fee325a55eda777c263504f606a169a33ba65fbe5
Dec 04 15:04:58 crc kubenswrapper[4946]: I1204 15:04:58.324717 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"a76493a6ad5cb2c6e60730b94b5be7990e158ec0544be0cf226d70d3aeb985ac"}
Dec 04 15:04:58 crc kubenswrapper[4946]: I1204 15:04:58.324786 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"6d6325805a189c4cd0f98d8fee325a55eda777c263504f606a169a33ba65fbe5"}
Dec 04 15:04:58 crc kubenswrapper[4946]: I1204 15:04:58.328168 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"fada8e82f7ea20b1790b68f87ae4aab7b38ea8ec8e36e8a5f89e345b6595fbbe"}
Dec 04 15:04:58 crc kubenswrapper[4946]: I1204 15:04:58.328219 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"722db8d95901fcc1cf6884f337ce2fd38e150e7c15b4891d71fab5556788910c"}
Dec 04 15:04:58 crc kubenswrapper[4946]: I1204 15:04:58.332980 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"a267107d6995f56fb8948c52be6be60d4a70cc0e45b462c4e61a34ed98edc05d"}
event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"a267107d6995f56fb8948c52be6be60d4a70cc0e45b462c4e61a34ed98edc05d"} Dec 04 15:04:58 crc kubenswrapper[4946]: I1204 15:04:58.333049 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"77d331408449a5be78250d7f524f570e26852d094c2ebf9de25edc40c96a8a4f"} Dec 04 15:04:58 crc kubenswrapper[4946]: I1204 15:04:58.333341 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.260903 4946 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.300791 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-bn6ld"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.301215 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.304349 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-z8dg2"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.304745 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-7bzmc"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.305045 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.306623 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.307056 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-5fshb"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.307359 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.309655 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cp7w9"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.309979 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-47szc"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.310305 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.310641 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.310721 4946 util.go:30] "No sandbox for pod can be found. 
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.310644 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9"
Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.312040 4946 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7": failed to list *v1.Secret: secrets "machine-api-operator-dockercfg-mfbb7" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object
Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.312086 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-dockercfg-mfbb7\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-api-operator-dockercfg-mfbb7\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.312609 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-7tl7m"]
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.313249 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv"]
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.321403 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-w8nz2"]
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.324142 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-w8nz2"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.326316 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.326531 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-7tl7m"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.326876 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq"]
Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.338892 4946 reflector.go:561] object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc": failed to list *v1.Secret: secrets "oauth-openshift-dockercfg-znhcc" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object
Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.338962 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"oauth-openshift-dockercfg-znhcc\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"oauth-openshift-dockercfg-znhcc\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.339165 4946 reflector.go:561] object-"openshift-authentication-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object
Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.339182 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.339294 4946 reflector.go:561] object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4": failed to list *v1.Secret: secrets "machine-approver-sa-dockercfg-nl2j4" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object
Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.339337 4946 reflector.go:561] object-"openshift-authentication"/"audit": failed to list *v1.ConfigMap: configmaps "audit" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object
Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.339352 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"audit\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"audit\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.339306 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
object-"openshift-apiserver"/"image-import-ca" Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.339415 4946 reflector.go:561] object-"openshift-authentication-operator"/"trusted-ca-bundle": failed to list *v1.ConfigMap: configmaps "trusted-ca-bundle" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.339429 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"trusted-ca-bundle\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"trusted-ca-bundle\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.339520 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.339629 4946 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv": failed to list *v1.Secret: secrets "openshift-apiserver-operator-dockercfg-xtcjv" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.339650 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-apiserver-operator-dockercfg-xtcjv\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-apiserver-operator-dockercfg-xtcjv\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.339707 4946 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-tls": failed to list *v1.Secret: secrets "machine-api-operator-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.339724 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-api-operator-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.339797 4946 reflector.go:561] object-"openshift-cluster-machine-approver"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.339814 4946 reflector.go:158] "Unhandled Error" 
err="object-\"openshift-cluster-machine-approver\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.339841 4946 reflector.go:561] object-"openshift-machine-api"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.339857 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.339882 4946 reflector.go:561] object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.339899 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.339975 4946 reflector.go:561] object-"openshift-machine-api"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.339983 4946 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert": failed to list *v1.Secret: secrets "openshift-apiserver-operator-serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.339988 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 04 15:04:59 crc 
Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.340050 4946 reflector.go:561] object-"openshift-authentication-operator"/"service-ca-bundle": failed to list *v1.ConfigMap: configmaps "service-ca-bundle" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object
Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.340063 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"service-ca-bundle\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"service-ca-bundle\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.340060 4946 reflector.go:561] object-"openshift-machine-api"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.340089 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.340099 4946 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-session": failed to list *v1.Secret: secrets "v4-0-config-system-session" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object
Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.340173 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-session\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-system-session\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.339345 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"machine-approver-sa-dockercfg-nl2j4\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-approver-sa-dockercfg-nl2j4\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.340088 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.340251 4946 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-router-certs": failed to list *v1.Secret: secrets "v4-0-config-system-router-certs" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.340404 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.340413 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.340267 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-router-certs\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-system-router-certs\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.340880 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.340954 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-bn6ld"]
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.341057 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.341611 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.341781 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.342885 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.343001 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.343014 4946 reflector.go:561] object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj": failed to list *v1.Secret: secrets "authentication-operator-dockercfg-mz9bj" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object
Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.343073 4946 reflector.go:561] object-"openshift-cluster-machine-approver"/"machine-approver-tls": failed to list *v1.Secret: secrets "machine-approver-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object
Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.343070 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"authentication-operator-dockercfg-mz9bj\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"authentication-operator-dockercfg-mz9bj\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.343102 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"machine-approver-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-approver-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.343159 4946 reflector.go:561] object-"openshift-apiserver-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object
Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.343171 4946 reflector.go:561] object-"openshift-authentication-operator"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object
Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.343175 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.343184 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.343212 4946 reflector.go:561] object-"openshift-authentication-operator"/"authentication-operator-config": failed to list *v1.ConfigMap: configmaps "authentication-operator-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object
Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.343225 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"authentication-operator-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"authentication-operator-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.343264 4946 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object
Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.343277 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.343293 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.343334 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.343498 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.343808 4946 reflector.go:561] object-"openshift-authentication-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object
"openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.343846 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.343934 4946 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config": failed to list *v1.ConfigMap: configmaps "openshift-apiserver-operator-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.343950 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-apiserver-operator-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-apiserver-operator-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.344005 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.344167 4946 reflector.go:561] object-"openshift-cluster-machine-approver"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.344190 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.344292 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.344397 4946 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-images": failed to list *v1.ConfigMap: configmaps "machine-api-operator-images" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.344415 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-images\": 
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.344470 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 04 15:04:59 crc kubenswrapper[4946]: W1204 15:04:59.344711 4946 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle": failed to list *v1.ConfigMap: configmaps "v4-0-config-system-trusted-ca-bundle" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object
Dec 04 15:04:59 crc kubenswrapper[4946]: E1204 15:04:59.344728 4946 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-trusted-ca-bundle\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"v4-0-config-system-trusted-ca-bundle\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.346077 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.353306 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.353767 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.353965 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.354085 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.354189 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.354288 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.354366 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.354472 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.354488 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.354524 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.354908 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.355132 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-7bzmc"]
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.355640 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.355719 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.355786 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.355910 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.356061 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.356195 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.356241 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.356261 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.356197 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.356368 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.358066 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.358304 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cp7w9"]
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.359784 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-z8dg2"]
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.359824 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-w8nz2"]
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.362812 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.363222 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.369587 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.375369 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
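Each "Caches populated for *v1.Secret/*v1.ConfigMap from object-..." line above corresponds to a per-object reflector the kubelet runs for every secret or configmap a pod mounts; once the reflector's initial list succeeds, its cache is primed and volume setup can proceed. A minimal client-go sketch of such a reflector; this is an illustrative stand-in (assuming an in-cluster config) rather than the kubelet's own code, which scopes each reflector to a single named object in the same way:

    package main

    import (
    	corev1 "k8s.io/api/core/v1"
    	"k8s.io/apimachinery/pkg/fields"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/rest"
    	"k8s.io/client-go/tools/cache"
    )

    func main() {
    	cfg, err := rest.InClusterConfig()
    	if err != nil {
    		panic(err)
    	}
    	cs := kubernetes.NewForConfigOrDie(cfg)

    	// List/watch a single named secret, scoped with a field selector on
    	// metadata.name, as the per-object reflectors in this log are.
    	lw := cache.NewListWatchFromClient(
    		cs.CoreV1().RESTClient(), "secrets", "openshift-multus",
    		fields.OneTermEqualSelector("metadata.name", "metrics-daemon-secret"))

    	store := cache.NewStore(cache.MetaNamespaceKeyFunc)
    	r := cache.NewReflector(lw, &corev1.Secret{}, store, 0)

    	stop := make(chan struct{})
    	go r.Run(stop) // emits "Caches populated ..." once the initial list+watch succeeds
    	<-stop
    }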
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.375912 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.376448 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.376697 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.377295 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.377444 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.377553 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.377670 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.377783 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.379916 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-v4qw8"]
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.380793 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-v4qw8"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.390550 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb"]
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.397970 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8"]
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.398805 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.399371 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.441581 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.441624 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/42d67fa8-e84e-4a09-a51d-c63365c274c5-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.441643 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.441661 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ed440a8-a3c7-48da-9811-bcc77750303a-config\") pod \"machine-approver-56656f9798-47szc\" (UID: \"4ed440a8-a3c7-48da-9811-bcc77750303a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.441677 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-audit-dir\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.441694 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/42d67fa8-e84e-4a09-a51d-c63365c274c5-etcd-client\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.441710 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0437519-c01e-4b89-a007-8fda5902ea9f-config\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.441725 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/8b20a593-dece-40b0-ae3f-12a9fabbf3e1-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-7tl7m\" (UID: \"8b20a593-dece-40b0-ae3f-12a9fabbf3e1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-7tl7m"
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-7tl7m" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.441742 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0cd92273-2253-49c3-9e31-0da3e687c206-serving-cert\") pod \"console-operator-58897d9998-w8nz2\" (UID: \"0cd92273-2253-49c3-9e31-0da3e687c206\") " pod="openshift-console-operator/console-operator-58897d9998-w8nz2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.441775 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2s98w\" (UniqueName: \"kubernetes.io/projected/65dc1ade-ddd4-4a22-99bd-780112f318f9-kube-api-access-2s98w\") pod \"machine-api-operator-5694c8668f-7bzmc\" (UID: \"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.441797 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0cd92273-2253-49c3-9e31-0da3e687c206-config\") pod \"console-operator-58897d9998-w8nz2\" (UID: \"0cd92273-2253-49c3-9e31-0da3e687c206\") " pod="openshift-console-operator/console-operator-58897d9998-w8nz2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.441820 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/4ed440a8-a3c7-48da-9811-bcc77750303a-machine-approver-tls\") pod \"machine-approver-56656f9798-47szc\" (UID: \"4ed440a8-a3c7-48da-9811-bcc77750303a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.441890 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d0437519-c01e-4b89-a007-8fda5902ea9f-node-pullsecrets\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.441908 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/d0437519-c01e-4b89-a007-8fda5902ea9f-image-import-ca\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.441925 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93885d87-d68e-4445-abd6-ece851137b17-serving-cert\") pod \"controller-manager-879f6c89f-bn6ld\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.441942 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-audit-policies\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc 
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.441983 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442001 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-serving-cert\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442021 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvblf\" (UniqueName: \"kubernetes.io/projected/4f02848c-6b07-4c72-8753-c34c4a3f210f-kube-api-access-tvblf\") pod \"openshift-apiserver-operator-796bbdcf4f-jfnsb\" (UID: \"4f02848c-6b07-4c72-8753-c34c4a3f210f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442037 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4ed440a8-a3c7-48da-9811-bcc77750303a-auth-proxy-config\") pod \"machine-approver-56656f9798-47szc\" (UID: \"4ed440a8-a3c7-48da-9811-bcc77750303a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442057 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-service-ca-bundle\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442081 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9"
Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442105 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/65dc1ade-ddd4-4a22-99bd-780112f318f9-images\") pod \"machine-api-operator-5694c8668f-7bzmc\" (UID: \"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc"
\"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442201 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93885d87-d68e-4445-abd6-ece851137b17-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-bn6ld\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442247 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0cd92273-2253-49c3-9e31-0da3e687c206-trusted-ca\") pod \"console-operator-58897d9998-w8nz2\" (UID: \"0cd92273-2253-49c3-9e31-0da3e687c206\") " pod="openshift-console-operator/console-operator-58897d9998-w8nz2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442268 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/42d67fa8-e84e-4a09-a51d-c63365c274c5-audit-policies\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442290 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/42d67fa8-e84e-4a09-a51d-c63365c274c5-audit-dir\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442330 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d0437519-c01e-4b89-a007-8fda5902ea9f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442377 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442439 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93885d87-d68e-4445-abd6-ece851137b17-config\") pod \"controller-manager-879f6c89f-bn6ld\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442492 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442516 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/d0437519-c01e-4b89-a007-8fda5902ea9f-audit\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442538 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z52pr\" (UniqueName: \"kubernetes.io/projected/0cd92273-2253-49c3-9e31-0da3e687c206-kube-api-access-z52pr\") pod \"console-operator-58897d9998-w8nz2\" (UID: \"0cd92273-2253-49c3-9e31-0da3e687c206\") " pod="openshift-console-operator/console-operator-58897d9998-w8nz2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442570 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/42d67fa8-e84e-4a09-a51d-c63365c274c5-serving-cert\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442591 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65dc1ade-ddd4-4a22-99bd-780112f318f9-config\") pod \"machine-api-operator-5694c8668f-7bzmc\" (UID: \"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442623 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442646 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kt224\" (UniqueName: \"kubernetes.io/projected/4ed440a8-a3c7-48da-9811-bcc77750303a-kube-api-access-kt224\") pod \"machine-approver-56656f9798-47szc\" (UID: \"4ed440a8-a3c7-48da-9811-bcc77750303a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442662 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442683 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f998c86-99a4-4416-b810-b40a8fb1775f-client-ca\") pod \"route-controller-manager-6576b87f9c-fqprq\" (UID: \"1f998c86-99a4-4416-b810-b40a8fb1775f\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442699 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsgbf\" (UniqueName: \"kubernetes.io/projected/8b20a593-dece-40b0-ae3f-12a9fabbf3e1-kube-api-access-hsgbf\") pod \"cluster-samples-operator-665b6dd947-7tl7m\" (UID: \"8b20a593-dece-40b0-ae3f-12a9fabbf3e1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-7tl7m" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442742 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-558k2\" (UniqueName: \"kubernetes.io/projected/42d67fa8-e84e-4a09-a51d-c63365c274c5-kube-api-access-558k2\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442761 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d0437519-c01e-4b89-a007-8fda5902ea9f-etcd-serving-ca\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442786 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-config\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442806 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/42d67fa8-e84e-4a09-a51d-c63365c274c5-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442840 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfcnp\" (UniqueName: \"kubernetes.io/projected/d0437519-c01e-4b89-a007-8fda5902ea9f-kube-api-access-nfcnp\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442903 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.442933 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/42d67fa8-e84e-4a09-a51d-c63365c274c5-encryption-config\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.443188 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f02848c-6b07-4c72-8753-c34c4a3f210f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jfnsb\" (UID: \"4f02848c-6b07-4c72-8753-c34c4a3f210f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.443219 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t529n\" (UniqueName: \"kubernetes.io/projected/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-kube-api-access-t529n\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.443263 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f02848c-6b07-4c72-8753-c34c4a3f210f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jfnsb\" (UID: \"4f02848c-6b07-4c72-8753-c34c4a3f210f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.443292 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/65dc1ade-ddd4-4a22-99bd-780112f318f9-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7bzmc\" (UID: \"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.443339 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d0437519-c01e-4b89-a007-8fda5902ea9f-etcd-client\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.443370 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.443388 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d0437519-c01e-4b89-a007-8fda5902ea9f-encryption-config\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.443421 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f998c86-99a4-4416-b810-b40a8fb1775f-config\") pod \"route-controller-manager-6576b87f9c-fqprq\" (UID: \"1f998c86-99a4-4416-b810-b40a8fb1775f\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.443442 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93885d87-d68e-4445-abd6-ece851137b17-client-ca\") pod \"controller-manager-879f6c89f-bn6ld\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.443459 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqbr8\" (UniqueName: \"kubernetes.io/projected/93885d87-d68e-4445-abd6-ece851137b17-kube-api-access-cqbr8\") pod \"controller-manager-879f6c89f-bn6ld\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.443481 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgh74\" (UniqueName: \"kubernetes.io/projected/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-kube-api-access-kgh74\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.443501 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f998c86-99a4-4416-b810-b40a8fb1775f-serving-cert\") pod \"route-controller-manager-6576b87f9c-fqprq\" (UID: \"1f998c86-99a4-4416-b810-b40a8fb1775f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.443517 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d0437519-c01e-4b89-a007-8fda5902ea9f-audit-dir\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.443546 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hxpn\" (UniqueName: \"kubernetes.io/projected/1f998c86-99a4-4416-b810-b40a8fb1775f-kube-api-access-2hxpn\") pod \"route-controller-manager-6576b87f9c-fqprq\" (UID: \"1f998c86-99a4-4416-b810-b40a8fb1775f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.443565 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d0437519-c01e-4b89-a007-8fda5902ea9f-serving-cert\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.443592 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: 
\"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.475843 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.476205 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.476511 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.476625 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.476759 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.477208 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.477340 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.477484 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.479857 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.480365 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.481019 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.481320 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.480666 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.481783 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.482609 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-w56q9"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.483291 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-6w5k9"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.483891 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cjfn2"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.486047 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-w56q9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.486305 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-6w5k9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.491058 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cjfn2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.491846 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.492170 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.492796 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.498350 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.498692 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.498387 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.498456 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.499195 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-9qzcp"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.499234 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.500026 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9qzcp" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.500832 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.501417 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.502018 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jw77k"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.502689 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jw77k" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.513276 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.513492 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.513507 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.513581 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.513652 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-qdrtz"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.513668 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.513839 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.513913 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.514012 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.514292 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-qdrtz" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.514806 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.516139 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-69nvv"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.516544 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-q85sw"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.516932 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-q85sw" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.517100 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-69nvv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.517799 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-2bxm7"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.520054 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2bxm7" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.525407 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.526882 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.528613 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.530625 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.531307 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-z4t8l"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.531652 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-dkjqm"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.532165 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-dkjqm" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.532457 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.532629 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.533048 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rvvcq"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.533478 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rvvcq" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.534530 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.543758 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544396 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-config\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544440 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/42d67fa8-e84e-4a09-a51d-c63365c274c5-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544465 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfcnp\" (UniqueName: \"kubernetes.io/projected/d0437519-c01e-4b89-a007-8fda5902ea9f-kube-api-access-nfcnp\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544488 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/42d67fa8-e84e-4a09-a51d-c63365c274c5-encryption-config\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544515 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544541 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f02848c-6b07-4c72-8753-c34c4a3f210f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jfnsb\" (UID: \"4f02848c-6b07-4c72-8753-c34c4a3f210f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544565 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t529n\" (UniqueName: \"kubernetes.io/projected/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-kube-api-access-t529n\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544593 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-mwlhg\" (UniqueName: \"kubernetes.io/projected/fb5b0fbe-569f-4edb-a5da-c1d37eec5981-kube-api-access-mwlhg\") pod \"catalog-operator-68c6474976-bdwn8\" (UID: \"fb5b0fbe-569f-4edb-a5da-c1d37eec5981\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544618 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f02848c-6b07-4c72-8753-c34c4a3f210f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jfnsb\" (UID: \"4f02848c-6b07-4c72-8753-c34c4a3f210f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544641 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/65dc1ade-ddd4-4a22-99bd-780112f318f9-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7bzmc\" (UID: \"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544663 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c28e21c-79cb-4fe0-b8f3-247fbce0640c-secret-volume\") pod \"collect-profiles-29414340-dzqwb\" (UID: \"9c28e21c-79cb-4fe0-b8f3-247fbce0640c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544687 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d0437519-c01e-4b89-a007-8fda5902ea9f-etcd-client\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544708 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c28e21c-79cb-4fe0-b8f3-247fbce0640c-config-volume\") pod \"collect-profiles-29414340-dzqwb\" (UID: \"9c28e21c-79cb-4fe0-b8f3-247fbce0640c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544729 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-console-serving-cert\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544752 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-trusted-ca-bundle\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544778 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-session\") pod 
\"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544802 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d0437519-c01e-4b89-a007-8fda5902ea9f-encryption-config\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544824 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-service-ca\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544848 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f998c86-99a4-4416-b810-b40a8fb1775f-config\") pod \"route-controller-manager-6576b87f9c-fqprq\" (UID: \"1f998c86-99a4-4416-b810-b40a8fb1775f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544870 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93885d87-d68e-4445-abd6-ece851137b17-client-ca\") pod \"controller-manager-879f6c89f-bn6ld\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544890 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqbr8\" (UniqueName: \"kubernetes.io/projected/93885d87-d68e-4445-abd6-ece851137b17-kube-api-access-cqbr8\") pod \"controller-manager-879f6c89f-bn6ld\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544913 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgh74\" (UniqueName: \"kubernetes.io/projected/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-kube-api-access-kgh74\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544934 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f998c86-99a4-4416-b810-b40a8fb1775f-serving-cert\") pod \"route-controller-manager-6576b87f9c-fqprq\" (UID: \"1f998c86-99a4-4416-b810-b40a8fb1775f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544953 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d0437519-c01e-4b89-a007-8fda5902ea9f-audit-dir\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544973 4946 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/fb5b0fbe-569f-4edb-a5da-c1d37eec5981-srv-cert\") pod \"catalog-operator-68c6474976-bdwn8\" (UID: \"fb5b0fbe-569f-4edb-a5da-c1d37eec5981\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.544997 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.545024 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hxpn\" (UniqueName: \"kubernetes.io/projected/1f998c86-99a4-4416-b810-b40a8fb1775f-kube-api-access-2hxpn\") pod \"route-controller-manager-6576b87f9c-fqprq\" (UID: \"1f998c86-99a4-4416-b810-b40a8fb1775f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.545048 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d0437519-c01e-4b89-a007-8fda5902ea9f-serving-cert\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.545066 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nbwkf"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.559492 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d0437519-c01e-4b89-a007-8fda5902ea9f-audit-dir\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.560243 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/42d67fa8-e84e-4a09-a51d-c63365c274c5-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.562173 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nbwkf" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.564515 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.566049 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.569681 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f998c86-99a4-4416-b810-b40a8fb1775f-config\") pod \"route-controller-manager-6576b87f9c-fqprq\" (UID: \"1f998c86-99a4-4416-b810-b40a8fb1775f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.570438 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93885d87-d68e-4445-abd6-ece851137b17-client-ca\") pod \"controller-manager-879f6c89f-bn6ld\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.571423 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-56jhv"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.545074 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.572970 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/42d67fa8-e84e-4a09-a51d-c63365c274c5-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.577868 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/42d67fa8-e84e-4a09-a51d-c63365c274c5-encryption-config\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.578709 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ed440a8-a3c7-48da-9811-bcc77750303a-config\") pod \"machine-approver-56656f9798-47szc\" (UID: \"4ed440a8-a3c7-48da-9811-bcc77750303a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.583564 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f998c86-99a4-4416-b810-b40a8fb1775f-serving-cert\") pod \"route-controller-manager-6576b87f9c-fqprq\" (UID: \"1f998c86-99a4-4416-b810-b40a8fb1775f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" Dec 04 
15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.583690 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d0437519-c01e-4b89-a007-8fda5902ea9f-serving-cert\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.584026 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-audit-dir\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.584066 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.584134 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-console-config\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.584527 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/42d67fa8-e84e-4a09-a51d-c63365c274c5-etcd-client\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.590474 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.590644 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d0437519-c01e-4b89-a007-8fda5902ea9f-etcd-client\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.591317 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d0437519-c01e-4b89-a007-8fda5902ea9f-encryption-config\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.591372 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-zzdp9"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.591702 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ed440a8-a3c7-48da-9811-bcc77750303a-config\") pod \"machine-approver-56656f9798-47szc\" (UID: \"4ed440a8-a3c7-48da-9811-bcc77750303a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592204 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmpsb"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592401 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0437519-c01e-4b89-a007-8fda5902ea9f-config\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592523 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.584588 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0437519-c01e-4b89-a007-8fda5902ea9f-config\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592583 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/8b20a593-dece-40b0-ae3f-12a9fabbf3e1-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-7tl7m\" (UID: \"8b20a593-dece-40b0-ae3f-12a9fabbf3e1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-7tl7m" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592603 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0cd92273-2253-49c3-9e31-0da3e687c206-serving-cert\") pod 
\"console-operator-58897d9998-w8nz2\" (UID: \"0cd92273-2253-49c3-9e31-0da3e687c206\") " pod="openshift-console-operator/console-operator-58897d9998-w8nz2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592623 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/4ed440a8-a3c7-48da-9811-bcc77750303a-machine-approver-tls\") pod \"machine-approver-56656f9798-47szc\" (UID: \"4ed440a8-a3c7-48da-9811-bcc77750303a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592641 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2s98w\" (UniqueName: \"kubernetes.io/projected/65dc1ade-ddd4-4a22-99bd-780112f318f9-kube-api-access-2s98w\") pod \"machine-api-operator-5694c8668f-7bzmc\" (UID: \"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592656 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0cd92273-2253-49c3-9e31-0da3e687c206-config\") pod \"console-operator-58897d9998-w8nz2\" (UID: \"0cd92273-2253-49c3-9e31-0da3e687c206\") " pod="openshift-console-operator/console-operator-58897d9998-w8nz2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592674 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d0437519-c01e-4b89-a007-8fda5902ea9f-node-pullsecrets\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592689 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/d0437519-c01e-4b89-a007-8fda5902ea9f-image-import-ca\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592704 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93885d87-d68e-4445-abd6-ece851137b17-serving-cert\") pod \"controller-manager-879f6c89f-bn6ld\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592710 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmpsb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592719 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592743 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-audit-policies\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592776 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592803 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-serving-cert\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592835 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbmbg\" (UniqueName: \"kubernetes.io/projected/9c28e21c-79cb-4fe0-b8f3-247fbce0640c-kube-api-access-pbmbg\") pod \"collect-profiles-29414340-dzqwb\" (UID: \"9c28e21c-79cb-4fe0-b8f3-247fbce0640c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592853 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-oauth-serving-cert\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592877 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvblf\" (UniqueName: \"kubernetes.io/projected/4f02848c-6b07-4c72-8753-c34c4a3f210f-kube-api-access-tvblf\") pod \"openshift-apiserver-operator-796bbdcf4f-jfnsb\" (UID: \"4f02848c-6b07-4c72-8753-c34c4a3f210f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592909 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-service-ca-bundle\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592924 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4ed440a8-a3c7-48da-9811-bcc77750303a-auth-proxy-config\") pod \"machine-approver-56656f9798-47szc\" (UID: \"4ed440a8-a3c7-48da-9811-bcc77750303a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592944 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592960 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjvqx\" (UniqueName: \"kubernetes.io/projected/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-kube-api-access-rjvqx\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592978 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/65dc1ade-ddd4-4a22-99bd-780112f318f9-images\") pod \"machine-api-operator-5694c8668f-7bzmc\" (UID: \"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592993 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93885d87-d68e-4445-abd6-ece851137b17-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-bn6ld\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592998 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593011 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0cd92273-2253-49c3-9e31-0da3e687c206-trusted-ca\") pod \"console-operator-58897d9998-w8nz2\" (UID: \"0cd92273-2253-49c3-9e31-0da3e687c206\") " pod="openshift-console-operator/console-operator-58897d9998-w8nz2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593028 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/fb5b0fbe-569f-4edb-a5da-c1d37eec5981-profile-collector-cert\") pod \"catalog-operator-68c6474976-bdwn8\" (UID: \"fb5b0fbe-569f-4edb-a5da-c1d37eec5981\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593049 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/42d67fa8-e84e-4a09-a51d-c63365c274c5-audit-policies\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593064 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/42d67fa8-e84e-4a09-a51d-c63365c274c5-audit-dir\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593080 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d0437519-c01e-4b89-a007-8fda5902ea9f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593096 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-console-oauth-config\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593132 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593161 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93885d87-d68e-4445-abd6-ece851137b17-config\") pod \"controller-manager-879f6c89f-bn6ld\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593180 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593195 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/d0437519-c01e-4b89-a007-8fda5902ea9f-audit\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593213 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z52pr\" (UniqueName: \"kubernetes.io/projected/0cd92273-2253-49c3-9e31-0da3e687c206-kube-api-access-z52pr\") pod \"console-operator-58897d9998-w8nz2\" (UID: \"0cd92273-2253-49c3-9e31-0da3e687c206\") " pod="openshift-console-operator/console-operator-58897d9998-w8nz2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593229 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/42d67fa8-e84e-4a09-a51d-c63365c274c5-serving-cert\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593252 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65dc1ade-ddd4-4a22-99bd-780112f318f9-config\") pod \"machine-api-operator-5694c8668f-7bzmc\" (UID: \"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593283 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593318 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kt224\" (UniqueName: \"kubernetes.io/projected/4ed440a8-a3c7-48da-9811-bcc77750303a-kube-api-access-kt224\") pod \"machine-approver-56656f9798-47szc\" (UID: \"4ed440a8-a3c7-48da-9811-bcc77750303a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593338 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593358 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f998c86-99a4-4416-b810-b40a8fb1775f-client-ca\") pod \"route-controller-manager-6576b87f9c-fqprq\" (UID: 
\"1f998c86-99a4-4416-b810-b40a8fb1775f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593379 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsgbf\" (UniqueName: \"kubernetes.io/projected/8b20a593-dece-40b0-ae3f-12a9fabbf3e1-kube-api-access-hsgbf\") pod \"cluster-samples-operator-665b6dd947-7tl7m\" (UID: \"8b20a593-dece-40b0-ae3f-12a9fabbf3e1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-7tl7m" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593413 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-558k2\" (UniqueName: \"kubernetes.io/projected/42d67fa8-e84e-4a09-a51d-c63365c274c5-kube-api-access-558k2\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593435 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d0437519-c01e-4b89-a007-8fda5902ea9f-etcd-serving-ca\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593546 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-fs4wh"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.593880 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d0437519-c01e-4b89-a007-8fda5902ea9f-etcd-serving-ca\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.594207 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vsskd"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.594670 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vsskd" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.594942 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-2bms7"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.595346 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-fs4wh" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.595842 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.596863 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-xwt27"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.597006 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2bms7" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.597515 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.592622 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-audit-dir\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.598470 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/42d67fa8-e84e-4a09-a51d-c63365c274c5-etcd-client\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.598493 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0cd92273-2253-49c3-9e31-0da3e687c206-config\") pod \"console-operator-58897d9998-w8nz2\" (UID: \"0cd92273-2253-49c3-9e31-0da3e687c206\") " pod="openshift-console-operator/console-operator-58897d9998-w8nz2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.598523 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/42d67fa8-e84e-4a09-a51d-c63365c274c5-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.598557 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d0437519-c01e-4b89-a007-8fda5902ea9f-node-pullsecrets\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.599565 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/d0437519-c01e-4b89-a007-8fda5902ea9f-image-import-ca\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.600742 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-lg2p8"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.601560 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xwt27" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.602266 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.602707 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/8b20a593-dece-40b0-ae3f-12a9fabbf3e1-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-7tl7m\" (UID: \"8b20a593-dece-40b0-ae3f-12a9fabbf3e1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-7tl7m" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.602996 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0cd92273-2253-49c3-9e31-0da3e687c206-serving-cert\") pod \"console-operator-58897d9998-w8nz2\" (UID: \"0cd92273-2253-49c3-9e31-0da3e687c206\") " pod="openshift-console-operator/console-operator-58897d9998-w8nz2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.603103 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/42d67fa8-e84e-4a09-a51d-c63365c274c5-audit-dir\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.603407 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/d0437519-c01e-4b89-a007-8fda5902ea9f-audit\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.604079 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.604134 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-7tl7m"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.604149 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-5fshb"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.604161 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-v4qw8"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.604171 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.604250 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.604617 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93885d87-d68e-4445-abd6-ece851137b17-serving-cert\") pod \"controller-manager-879f6c89f-bn6ld\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.604981 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.606104 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/42d67fa8-e84e-4a09-a51d-c63365c274c5-audit-policies\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.606289 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d0437519-c01e-4b89-a007-8fda5902ea9f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.606809 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.606831 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.615346 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.615513 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.615616 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.615691 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jw77k"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.615810 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-w56q9"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.615893 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-2bxm7"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.615976 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-dkjqm"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.608553 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" 
(UniqueName: \"kubernetes.io/configmap/0cd92273-2253-49c3-9e31-0da3e687c206-trusted-ca\") pod \"console-operator-58897d9998-w8nz2\" (UID: \"0cd92273-2253-49c3-9e31-0da3e687c206\") " pod="openshift-console-operator/console-operator-58897d9998-w8nz2" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.607515 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93885d87-d68e-4445-abd6-ece851137b17-config\") pod \"controller-manager-879f6c89f-bn6ld\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.610647 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/42d67fa8-e84e-4a09-a51d-c63365c274c5-serving-cert\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.609888 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f998c86-99a4-4416-b810-b40a8fb1775f-client-ca\") pod \"route-controller-manager-6576b87f9c-fqprq\" (UID: \"1f998c86-99a4-4416-b810-b40a8fb1775f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.610355 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.612680 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.613484 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93885d87-d68e-4445-abd6-ece851137b17-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-bn6ld\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.616844 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.617442 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.618647 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-dns-operator/dns-operator-744455d44c-6w5k9"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.620305 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rvvcq"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.623199 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9qzcp"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.623672 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.624055 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-q85sw"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.625227 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.625255 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.627398 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-v66dj"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.628371 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmpsb"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.628481 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-v66dj" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.629642 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-xwt27"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.630588 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.632957 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-v66dj"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.635174 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-z4t8l"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.636812 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vsskd"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.638939 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-lg2p8"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.640831 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-56jhv"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.643050 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nbwkf"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.643827 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.644540 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-fs4wh"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.646402 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-2bms7"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.647817 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-wm6jt"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.648827 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.649839 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-69nvv"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.652897 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cjfn2"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.654749 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-wm6jt"] Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.663511 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.684473 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.694660 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-console-oauth-config\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.694807 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ed0f3fb-346c-4409-8d05-6286b8151dd2-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-69nvv\" (UID: \"9ed0f3fb-346c-4409-8d05-6286b8151dd2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-69nvv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.694942 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0b12867e-de02-4b45-ac09-5140aab7451e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-z4t8l\" (UID: \"0b12867e-de02-4b45-ac09-5140aab7451e\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.695020 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c3d07ea4-a001-42fe-9405-7f9f95f5523f-profile-collector-cert\") pod \"olm-operator-6b444d44fb-s796b\" (UID: \"c3d07ea4-a001-42fe-9405-7f9f95f5523f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.695290 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwlhg\" (UniqueName: \"kubernetes.io/projected/fb5b0fbe-569f-4edb-a5da-c1d37eec5981-kube-api-access-mwlhg\") pod \"catalog-operator-68c6474976-bdwn8\" (UID: \"fb5b0fbe-569f-4edb-a5da-c1d37eec5981\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.695480 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c28e21c-79cb-4fe0-b8f3-247fbce0640c-secret-volume\") pod 
\"collect-profiles-29414340-dzqwb\" (UID: \"9c28e21c-79cb-4fe0-b8f3-247fbce0640c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.695578 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c28e21c-79cb-4fe0-b8f3-247fbce0640c-config-volume\") pod \"collect-profiles-29414340-dzqwb\" (UID: \"9c28e21c-79cb-4fe0-b8f3-247fbce0640c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.695671 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-console-serving-cert\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.695855 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-service-ca\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.695971 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-trusted-ca-bundle\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.696175 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxsgj\" (UniqueName: \"kubernetes.io/projected/0b12867e-de02-4b45-ac09-5140aab7451e-kube-api-access-vxsgj\") pod \"marketplace-operator-79b997595-z4t8l\" (UID: \"0b12867e-de02-4b45-ac09-5140aab7451e\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.696309 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3539ee4e-f397-45e0-b449-93b150766448-metrics-tls\") pod \"dns-operator-744455d44c-6w5k9\" (UID: \"3539ee4e-f397-45e0-b449-93b150766448\") " pod="openshift-dns-operator/dns-operator-744455d44c-6w5k9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.696548 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c28e21c-79cb-4fe0-b8f3-247fbce0640c-config-volume\") pod \"collect-profiles-29414340-dzqwb\" (UID: \"9c28e21c-79cb-4fe0-b8f3-247fbce0640c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.696712 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-service-ca\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.696873 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" 
(UniqueName: \"kubernetes.io/secret/fb5b0fbe-569f-4edb-a5da-c1d37eec5981-srv-cert\") pod \"catalog-operator-68c6474976-bdwn8\" (UID: \"fb5b0fbe-569f-4edb-a5da-c1d37eec5981\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.697062 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-trusted-ca-bundle\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.697214 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szblj\" (UniqueName: \"kubernetes.io/projected/3539ee4e-f397-45e0-b449-93b150766448-kube-api-access-szblj\") pod \"dns-operator-744455d44c-6w5k9\" (UID: \"3539ee4e-f397-45e0-b449-93b150766448\") " pod="openshift-dns-operator/dns-operator-744455d44c-6w5k9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.697328 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/2eebdd40-7f32-4d80-9a29-28373b288710-signing-key\") pod \"service-ca-9c57cc56f-dkjqm\" (UID: \"2eebdd40-7f32-4d80-9a29-28373b288710\") " pod="openshift-service-ca/service-ca-9c57cc56f-dkjqm" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.697493 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-console-config\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.697613 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b12867e-de02-4b45-ac09-5140aab7451e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-z4t8l\" (UID: \"0b12867e-de02-4b45-ac09-5140aab7451e\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.697727 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gncj6\" (UniqueName: \"kubernetes.io/projected/c3d07ea4-a001-42fe-9405-7f9f95f5523f-kube-api-access-gncj6\") pod \"olm-operator-6b444d44fb-s796b\" (UID: \"c3d07ea4-a001-42fe-9405-7f9f95f5523f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.697824 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4grl\" (UniqueName: \"kubernetes.io/projected/2eebdd40-7f32-4d80-9a29-28373b288710-kube-api-access-n4grl\") pod \"service-ca-9c57cc56f-dkjqm\" (UID: \"2eebdd40-7f32-4d80-9a29-28373b288710\") " pod="openshift-service-ca/service-ca-9c57cc56f-dkjqm" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.697951 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/2eebdd40-7f32-4d80-9a29-28373b288710-signing-cabundle\") pod \"service-ca-9c57cc56f-dkjqm\" (UID: 
\"2eebdd40-7f32-4d80-9a29-28373b288710\") " pod="openshift-service-ca/service-ca-9c57cc56f-dkjqm" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.698073 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbmbg\" (UniqueName: \"kubernetes.io/projected/9c28e21c-79cb-4fe0-b8f3-247fbce0640c-kube-api-access-pbmbg\") pod \"collect-profiles-29414340-dzqwb\" (UID: \"9c28e21c-79cb-4fe0-b8f3-247fbce0640c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.698104 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-oauth-serving-cert\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.698146 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-console-config\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.698172 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjvqx\" (UniqueName: \"kubernetes.io/projected/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-kube-api-access-rjvqx\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.698204 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9ed0f3fb-346c-4409-8d05-6286b8151dd2-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-69nvv\" (UID: \"9ed0f3fb-346c-4409-8d05-6286b8151dd2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-69nvv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.698239 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ed0f3fb-346c-4409-8d05-6286b8151dd2-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-69nvv\" (UID: \"9ed0f3fb-346c-4409-8d05-6286b8151dd2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-69nvv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.698264 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/fb5b0fbe-569f-4edb-a5da-c1d37eec5981-profile-collector-cert\") pod \"catalog-operator-68c6474976-bdwn8\" (UID: \"fb5b0fbe-569f-4edb-a5da-c1d37eec5981\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.698288 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c3d07ea4-a001-42fe-9405-7f9f95f5523f-srv-cert\") pod \"olm-operator-6b444d44fb-s796b\" (UID: \"c3d07ea4-a001-42fe-9405-7f9f95f5523f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b" Dec 04 15:04:59 crc 
kubenswrapper[4946]: I1204 15:04:59.698346 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c28e21c-79cb-4fe0-b8f3-247fbce0640c-secret-volume\") pod \"collect-profiles-29414340-dzqwb\" (UID: \"9c28e21c-79cb-4fe0-b8f3-247fbce0640c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.698774 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-oauth-serving-cert\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.699526 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-console-serving-cert\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.699686 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-console-oauth-config\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.700984 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/fb5b0fbe-569f-4edb-a5da-c1d37eec5981-srv-cert\") pod \"catalog-operator-68c6474976-bdwn8\" (UID: \"fb5b0fbe-569f-4edb-a5da-c1d37eec5981\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.701846 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/fb5b0fbe-569f-4edb-a5da-c1d37eec5981-profile-collector-cert\") pod \"catalog-operator-68c6474976-bdwn8\" (UID: \"fb5b0fbe-569f-4edb-a5da-c1d37eec5981\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.703455 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.743930 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.763613 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.782973 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.800036 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxsgj\" (UniqueName: \"kubernetes.io/projected/0b12867e-de02-4b45-ac09-5140aab7451e-kube-api-access-vxsgj\") pod \"marketplace-operator-79b997595-z4t8l\" (UID: \"0b12867e-de02-4b45-ac09-5140aab7451e\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.800143 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3539ee4e-f397-45e0-b449-93b150766448-metrics-tls\") pod \"dns-operator-744455d44c-6w5k9\" (UID: \"3539ee4e-f397-45e0-b449-93b150766448\") " pod="openshift-dns-operator/dns-operator-744455d44c-6w5k9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.800190 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szblj\" (UniqueName: \"kubernetes.io/projected/3539ee4e-f397-45e0-b449-93b150766448-kube-api-access-szblj\") pod \"dns-operator-744455d44c-6w5k9\" (UID: \"3539ee4e-f397-45e0-b449-93b150766448\") " pod="openshift-dns-operator/dns-operator-744455d44c-6w5k9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.800245 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/2eebdd40-7f32-4d80-9a29-28373b288710-signing-key\") pod \"service-ca-9c57cc56f-dkjqm\" (UID: \"2eebdd40-7f32-4d80-9a29-28373b288710\") " pod="openshift-service-ca/service-ca-9c57cc56f-dkjqm" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.800285 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b12867e-de02-4b45-ac09-5140aab7451e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-z4t8l\" (UID: \"0b12867e-de02-4b45-ac09-5140aab7451e\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.800320 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gncj6\" (UniqueName: \"kubernetes.io/projected/c3d07ea4-a001-42fe-9405-7f9f95f5523f-kube-api-access-gncj6\") pod \"olm-operator-6b444d44fb-s796b\" (UID: \"c3d07ea4-a001-42fe-9405-7f9f95f5523f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.800387 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4grl\" (UniqueName: \"kubernetes.io/projected/2eebdd40-7f32-4d80-9a29-28373b288710-kube-api-access-n4grl\") pod \"service-ca-9c57cc56f-dkjqm\" (UID: \"2eebdd40-7f32-4d80-9a29-28373b288710\") " pod="openshift-service-ca/service-ca-9c57cc56f-dkjqm" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.800448 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/2eebdd40-7f32-4d80-9a29-28373b288710-signing-cabundle\") pod \"service-ca-9c57cc56f-dkjqm\" (UID: \"2eebdd40-7f32-4d80-9a29-28373b288710\") " pod="openshift-service-ca/service-ca-9c57cc56f-dkjqm" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.800551 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9ed0f3fb-346c-4409-8d05-6286b8151dd2-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-69nvv\" (UID: \"9ed0f3fb-346c-4409-8d05-6286b8151dd2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-69nvv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.800597 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ed0f3fb-346c-4409-8d05-6286b8151dd2-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-69nvv\" (UID: \"9ed0f3fb-346c-4409-8d05-6286b8151dd2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-69nvv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.800629 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c3d07ea4-a001-42fe-9405-7f9f95f5523f-srv-cert\") pod \"olm-operator-6b444d44fb-s796b\" (UID: \"c3d07ea4-a001-42fe-9405-7f9f95f5523f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.800702 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ed0f3fb-346c-4409-8d05-6286b8151dd2-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-69nvv\" (UID: \"9ed0f3fb-346c-4409-8d05-6286b8151dd2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-69nvv" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.800749 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0b12867e-de02-4b45-ac09-5140aab7451e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-z4t8l\" (UID: \"0b12867e-de02-4b45-ac09-5140aab7451e\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.800801 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c3d07ea4-a001-42fe-9405-7f9f95f5523f-profile-collector-cert\") pod \"olm-operator-6b444d44fb-s796b\" (UID: \"c3d07ea4-a001-42fe-9405-7f9f95f5523f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.804094 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.806798 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c3d07ea4-a001-42fe-9405-7f9f95f5523f-profile-collector-cert\") pod \"olm-operator-6b444d44fb-s796b\" (UID: \"c3d07ea4-a001-42fe-9405-7f9f95f5523f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.807415 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3539ee4e-f397-45e0-b449-93b150766448-metrics-tls\") pod \"dns-operator-744455d44c-6w5k9\" (UID: \"3539ee4e-f397-45e0-b449-93b150766448\") " pod="openshift-dns-operator/dns-operator-744455d44c-6w5k9" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.823325 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.833915 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c3d07ea4-a001-42fe-9405-7f9f95f5523f-srv-cert\") pod \"olm-operator-6b444d44fb-s796b\" (UID: \"c3d07ea4-a001-42fe-9405-7f9f95f5523f\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.843725 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.862902 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.884635 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.903284 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.924566 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.943760 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.963685 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 04 15:04:59 crc kubenswrapper[4946]: I1204 15:04:59.983999 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.003442 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.024278 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.044958 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.064678 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.074404 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ed0f3fb-346c-4409-8d05-6286b8151dd2-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-69nvv\" (UID: \"9ed0f3fb-346c-4409-8d05-6286b8151dd2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-69nvv" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.084105 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.092285 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ed0f3fb-346c-4409-8d05-6286b8151dd2-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-69nvv\" (UID: \"9ed0f3fb-346c-4409-8d05-6286b8151dd2\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-69nvv" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.103570 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.124027 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.143778 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.164213 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.189266 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.204960 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.223861 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.244016 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.253887 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/2eebdd40-7f32-4d80-9a29-28373b288710-signing-key\") pod \"service-ca-9c57cc56f-dkjqm\" (UID: \"2eebdd40-7f32-4d80-9a29-28373b288710\") " pod="openshift-service-ca/service-ca-9c57cc56f-dkjqm" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.264027 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.271610 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/2eebdd40-7f32-4d80-9a29-28373b288710-signing-cabundle\") pod \"service-ca-9c57cc56f-dkjqm\" (UID: \"2eebdd40-7f32-4d80-9a29-28373b288710\") " pod="openshift-service-ca/service-ca-9c57cc56f-dkjqm" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.286182 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.303564 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.324717 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.343504 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.363186 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.375483 
4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0b12867e-de02-4b45-ac09-5140aab7451e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-z4t8l\" (UID: \"0b12867e-de02-4b45-ac09-5140aab7451e\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.394757 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.401677 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b12867e-de02-4b45-ac09-5140aab7451e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-z4t8l\" (UID: \"0b12867e-de02-4b45-ac09-5140aab7451e\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.405316 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.423920 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.442845 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.488067 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqbr8\" (UniqueName: \"kubernetes.io/projected/93885d87-d68e-4445-abd6-ece851137b17-kube-api-access-cqbr8\") pod \"controller-manager-879f6c89f-bn6ld\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.523351 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.528248 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfcnp\" (UniqueName: \"kubernetes.io/projected/d0437519-c01e-4b89-a007-8fda5902ea9f-kube-api-access-nfcnp\") pod \"apiserver-76f77b778f-z8dg2\" (UID: \"d0437519-c01e-4b89-a007-8fda5902ea9f\") " pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.545351 4946 configmap.go:193] Couldn't get configMap openshift-authentication-operator/trusted-ca-bundle: failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.545428 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-trusted-ca-bundle podName:cf2f88d9-df9c-4d4a-a1a1-6ab46612186e nodeName:}" failed. No retries permitted until 2025-12-04 15:05:01.045407527 +0000 UTC m=+151.931451178 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "trusted-ca-bundle" (UniqueName: "kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-trusted-ca-bundle") pod "authentication-operator-69f744f599-5fshb" (UID: "cf2f88d9-df9c-4d4a-a1a1-6ab46612186e") : failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.560792 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.561505 4946 configmap.go:193] Couldn't get configMap openshift-authentication-operator/authentication-operator-config: failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.561582 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-config podName:cf2f88d9-df9c-4d4a-a1a1-6ab46612186e nodeName:}" failed. No retries permitted until 2025-12-04 15:05:01.061561081 +0000 UTC m=+151.947604742 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-config") pod "authentication-operator-69f744f599-5fshb" (UID: "cf2f88d9-df9c-4d4a-a1a1-6ab46612186e") : failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.561781 4946 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-session: failed to sync secret cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.561817 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-session podName:02de3a18-59d7-48c0-bf9c-d40c09ed8cee nodeName:}" failed. No retries permitted until 2025-12-04 15:05:01.061806269 +0000 UTC m=+151.947849920 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-session" (UniqueName: "kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-session") pod "oauth-openshift-558db77b4-cp7w9" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee") : failed to sync secret cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.562425 4946 secret.go:188] Couldn't get secret openshift-apiserver-operator/openshift-apiserver-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.562464 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4f02848c-6b07-4c72-8753-c34c4a3f210f-serving-cert podName:4f02848c-6b07-4c72-8753-c34c4a3f210f nodeName:}" failed. No retries permitted until 2025-12-04 15:05:01.06245291 +0000 UTC m=+151.948496561 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/4f02848c-6b07-4c72-8753-c34c4a3f210f-serving-cert") pod "openshift-apiserver-operator-796bbdcf4f-jfnsb" (UID: "4f02848c-6b07-4c72-8753-c34c4a3f210f") : failed to sync secret cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.562516 4946 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-router-certs: failed to sync secret cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.562752 4946 configmap.go:193] Couldn't get configMap openshift-apiserver-operator/openshift-apiserver-operator-config: failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.562970 4946 secret.go:188] Couldn't get secret openshift-machine-api/machine-api-operator-tls: failed to sync secret cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.562561 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-router-certs podName:02de3a18-59d7-48c0-bf9c-d40c09ed8cee nodeName:}" failed. No retries permitted until 2025-12-04 15:05:01.062545703 +0000 UTC m=+151.948589354 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-router-certs" (UniqueName: "kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-router-certs") pod "oauth-openshift-558db77b4-cp7w9" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee") : failed to sync secret cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.563025 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4f02848c-6b07-4c72-8753-c34c4a3f210f-config podName:4f02848c-6b07-4c72-8753-c34c4a3f210f nodeName:}" failed. No retries permitted until 2025-12-04 15:05:01.063013718 +0000 UTC m=+151.949057379 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/4f02848c-6b07-4c72-8753-c34c4a3f210f-config") pod "openshift-apiserver-operator-796bbdcf4f-jfnsb" (UID: "4f02848c-6b07-4c72-8753-c34c4a3f210f") : failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.563052 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/65dc1ade-ddd4-4a22-99bd-780112f318f9-machine-api-operator-tls podName:65dc1ade-ddd4-4a22-99bd-780112f318f9 nodeName:}" failed. No retries permitted until 2025-12-04 15:05:01.063045639 +0000 UTC m=+151.949089290 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "machine-api-operator-tls" (UniqueName: "kubernetes.io/secret/65dc1ade-ddd4-4a22-99bd-780112f318f9-machine-api-operator-tls") pod "machine-api-operator-5694c8668f-7bzmc" (UID: "65dc1ade-ddd4-4a22-99bd-780112f318f9") : failed to sync secret cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.565236 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t529n\" (UniqueName: \"kubernetes.io/projected/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-kube-api-access-t529n\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.584064 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hxpn\" (UniqueName: \"kubernetes.io/projected/1f998c86-99a4-4416-b810-b40a8fb1775f-kube-api-access-2hxpn\") pod \"route-controller-manager-6576b87f9c-fqprq\" (UID: \"1f998c86-99a4-4416-b810-b40a8fb1775f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.595144 4946 configmap.go:193] Couldn't get configMap openshift-machine-api/machine-api-operator-images: failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.595243 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/65dc1ade-ddd4-4a22-99bd-780112f318f9-images podName:65dc1ade-ddd4-4a22-99bd-780112f318f9 nodeName:}" failed. No retries permitted until 2025-12-04 15:05:01.095216643 +0000 UTC m=+151.981260294 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "images" (UniqueName: "kubernetes.io/configmap/65dc1ade-ddd4-4a22-99bd-780112f318f9-images") pod "machine-api-operator-5694c8668f-7bzmc" (UID: "65dc1ade-ddd4-4a22-99bd-780112f318f9") : failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.598364 4946 secret.go:188] Couldn't get secret openshift-cluster-machine-approver/machine-approver-tls: failed to sync secret cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.598415 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4ed440a8-a3c7-48da-9811-bcc77750303a-machine-approver-tls podName:4ed440a8-a3c7-48da-9811-bcc77750303a nodeName:}" failed. No retries permitted until 2025-12-04 15:05:01.098401085 +0000 UTC m=+151.984444806 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "machine-approver-tls" (UniqueName: "kubernetes.io/secret/4ed440a8-a3c7-48da-9811-bcc77750303a-machine-approver-tls") pod "machine-approver-56656f9798-47szc" (UID: "4ed440a8-a3c7-48da-9811-bcc77750303a") : failed to sync secret cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.598422 4946 configmap.go:193] Couldn't get configMap openshift-authentication/audit: failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.598457 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-audit-policies podName:02de3a18-59d7-48c0-bf9c-d40c09ed8cee nodeName:}" failed. 
No retries permitted until 2025-12-04 15:05:01.098448216 +0000 UTC m=+151.984491857 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "audit-policies" (UniqueName: "kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-audit-policies") pod "oauth-openshift-558db77b4-cp7w9" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee") : failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.602521 4946 secret.go:188] Couldn't get secret openshift-authentication-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.602541 4946 request.go:700] Waited for 1.011672732s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/secrets?fieldSelector=metadata.name%3Dinstallation-pull-secrets&limit=500&resourceVersion=0 Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.602589 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-serving-cert podName:cf2f88d9-df9c-4d4a-a1a1-6ab46612186e nodeName:}" failed. No retries permitted until 2025-12-04 15:05:01.102569257 +0000 UTC m=+151.988612968 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-serving-cert") pod "authentication-operator-69f744f599-5fshb" (UID: "cf2f88d9-df9c-4d4a-a1a1-6ab46612186e") : failed to sync secret cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.604058 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.604860 4946 configmap.go:193] Couldn't get configMap openshift-authentication/v4-0-config-system-trusted-ca-bundle: failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.604974 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-trusted-ca-bundle podName:02de3a18-59d7-48c0-bf9c-d40c09ed8cee nodeName:}" failed. No retries permitted until 2025-12-04 15:05:01.104955644 +0000 UTC m=+151.990999285 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-trusted-ca-bundle" (UniqueName: "kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-trusted-ca-bundle") pod "oauth-openshift-558db77b4-cp7w9" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee") : failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.605506 4946 configmap.go:193] Couldn't get configMap openshift-authentication-operator/service-ca-bundle: failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.605597 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-service-ca-bundle podName:cf2f88d9-df9c-4d4a-a1a1-6ab46612186e nodeName:}" failed. No retries permitted until 2025-12-04 15:05:01.105563213 +0000 UTC m=+151.991606854 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "service-ca-bundle" (UniqueName: "kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-service-ca-bundle") pod "authentication-operator-69f744f599-5fshb" (UID: "cf2f88d9-df9c-4d4a-a1a1-6ab46612186e") : failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.605670 4946 configmap.go:193] Couldn't get configMap openshift-cluster-machine-approver/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.605787 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4ed440a8-a3c7-48da-9811-bcc77750303a-auth-proxy-config podName:4ed440a8-a3c7-48da-9811-bcc77750303a nodeName:}" failed. No retries permitted until 2025-12-04 15:05:01.10577426 +0000 UTC m=+151.991817961 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "auth-proxy-config" (UniqueName: "kubernetes.io/configmap/4ed440a8-a3c7-48da-9811-bcc77750303a-auth-proxy-config") pod "machine-approver-56656f9798-47szc" (UID: "4ed440a8-a3c7-48da-9811-bcc77750303a") : failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.610200 4946 configmap.go:193] Couldn't get configMap openshift-machine-api/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: E1204 15:05:00.610264 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/65dc1ade-ddd4-4a22-99bd-780112f318f9-config podName:65dc1ade-ddd4-4a22-99bd-780112f318f9 nodeName:}" failed. No retries permitted until 2025-12-04 15:05:01.110248872 +0000 UTC m=+151.996292593 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/65dc1ade-ddd4-4a22-99bd-780112f318f9-config") pod "machine-api-operator-5694c8668f-7bzmc" (UID: "65dc1ade-ddd4-4a22-99bd-780112f318f9") : failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.616503 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.627520 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.650433 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.664892 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.684434 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.704907 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.726637 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.744146 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.750805 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-bn6ld"] Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.763797 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.784001 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.803552 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.811851 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-z8dg2"] Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.823577 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.833392 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.844178 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.863681 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.883902 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.904768 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.964604 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.983829 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 04 15:05:00 crc kubenswrapper[4946]: I1204 15:05:00.991671 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq"] Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.004371 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.031215 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.044069 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.064961 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.084016 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.105697 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.123640 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-service-ca-bundle\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.123713 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4ed440a8-a3c7-48da-9811-bcc77750303a-auth-proxy-config\") pod \"machine-approver-56656f9798-47szc\" (UID: \"4ed440a8-a3c7-48da-9811-bcc77750303a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc" Dec 04 15:05:01 crc 
kubenswrapper[4946]: I1204 15:05:01.123787 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/65dc1ade-ddd4-4a22-99bd-780112f318f9-images\") pod \"machine-api-operator-5694c8668f-7bzmc\" (UID: \"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.123860 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65dc1ade-ddd4-4a22-99bd-780112f318f9-config\") pod \"machine-api-operator-5694c8668f-7bzmc\" (UID: \"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.123939 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-config\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.123982 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.124023 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f02848c-6b07-4c72-8753-c34c4a3f210f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jfnsb\" (UID: \"4f02848c-6b07-4c72-8753-c34c4a3f210f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.124076 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f02848c-6b07-4c72-8753-c34c4a3f210f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jfnsb\" (UID: \"4f02848c-6b07-4c72-8753-c34c4a3f210f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.124149 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/65dc1ade-ddd4-4a22-99bd-780112f318f9-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7bzmc\" (UID: \"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.124195 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.124292 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.124337 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/4ed440a8-a3c7-48da-9811-bcc77750303a-machine-approver-tls\") pod \"machine-approver-56656f9798-47szc\" (UID: \"4ed440a8-a3c7-48da-9811-bcc77750303a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.124421 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-audit-policies\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.124476 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.124500 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.124538 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-serving-cert\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.145264 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.184080 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.189906 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z52pr\" (UniqueName: \"kubernetes.io/projected/0cd92273-2253-49c3-9e31-0da3e687c206-kube-api-access-z52pr\") pod \"console-operator-58897d9998-w8nz2\" (UID: \"0cd92273-2253-49c3-9e31-0da3e687c206\") " pod="openshift-console-operator/console-operator-58897d9998-w8nz2" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.204064 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.224346 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.243729 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 04 15:05:01 
crc kubenswrapper[4946]: I1204 15:05:01.263763 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.283471 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.304407 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.314006 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-w8nz2" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.324596 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.345044 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.363663 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.369398 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" event={"ID":"1f998c86-99a4-4416-b810-b40a8fb1775f","Type":"ContainerStarted","Data":"b195f00c8871a9f9866e625de6834d6aecb186f263506b3ecf8804f14984a312"} Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.371484 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" event={"ID":"d0437519-c01e-4b89-a007-8fda5902ea9f","Type":"ContainerStarted","Data":"4e960e4722dc037091c4b9c3bf5caca56ac19b0ae85ac217cd18b43ebe3340de"} Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.377611 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" event={"ID":"93885d87-d68e-4445-abd6-ece851137b17","Type":"ContainerStarted","Data":"a518f98c0ead0af32911567b6c0c825568c0eafbb7afab335b355cd481f42cf4"} Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.385444 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.442133 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsgbf\" (UniqueName: \"kubernetes.io/projected/8b20a593-dece-40b0-ae3f-12a9fabbf3e1-kube-api-access-hsgbf\") pod \"cluster-samples-operator-665b6dd947-7tl7m\" (UID: \"8b20a593-dece-40b0-ae3f-12a9fabbf3e1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-7tl7m" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.461424 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-558k2\" (UniqueName: \"kubernetes.io/projected/42d67fa8-e84e-4a09-a51d-c63365c274c5-kube-api-access-558k2\") pod \"apiserver-7bbb656c7d-d6qcv\" (UID: \"42d67fa8-e84e-4a09-a51d-c63365c274c5\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.463562 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 
Dec 04 15:05:01 crc kubenswrapper[4946]: E1204 15:05:01.499461 4946 projected.go:288] Couldn't get configMap openshift-authentication-operator/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.503257 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.516472 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-w8nz2"]
Dec 04 15:05:01 crc kubenswrapper[4946]: W1204 15:05:01.523275 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0cd92273_2253_49c3_9e31_0da3e687c206.slice/crio-3bec1397e7212e512baed3154db802e824078ccd81f3c4762f89ae6c1b591852 WatchSource:0}: Error finding container 3bec1397e7212e512baed3154db802e824078ccd81f3c4762f89ae6c1b591852: Status 404 returned error can't find the container with id 3bec1397e7212e512baed3154db802e824078ccd81f3c4762f89ae6c1b591852
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.523880 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.543881 4946 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.563490 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.584014 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.621592 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwlhg\" (UniqueName: \"kubernetes.io/projected/fb5b0fbe-569f-4edb-a5da-c1d37eec5981-kube-api-access-mwlhg\") pod \"catalog-operator-68c6474976-bdwn8\" (UID: \"fb5b0fbe-569f-4edb-a5da-c1d37eec5981\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.621888 4946 request.go:700] Waited for 1.923590585s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/serviceaccounts/collect-profiles/token
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.635317 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.640631 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbmbg\" (UniqueName: \"kubernetes.io/projected/9c28e21c-79cb-4fe0-b8f3-247fbce0640c-kube-api-access-pbmbg\") pod \"collect-profiles-29414340-dzqwb\" (UID: \"9c28e21c-79cb-4fe0-b8f3-247fbce0640c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.660842 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjvqx\" (UniqueName: \"kubernetes.io/projected/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-kube-api-access-rjvqx\") pod \"console-f9d7485db-v4qw8\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " pod="openshift-console/console-f9d7485db-v4qw8"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.663620 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-7tl7m"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.709482 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxsgj\" (UniqueName: \"kubernetes.io/projected/0b12867e-de02-4b45-ac09-5140aab7451e-kube-api-access-vxsgj\") pod \"marketplace-operator-79b997595-z4t8l\" (UID: \"0b12867e-de02-4b45-ac09-5140aab7451e\") " pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.728334 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szblj\" (UniqueName: \"kubernetes.io/projected/3539ee4e-f397-45e0-b449-93b150766448-kube-api-access-szblj\") pod \"dns-operator-744455d44c-6w5k9\" (UID: \"3539ee4e-f397-45e0-b449-93b150766448\") " pod="openshift-dns-operator/dns-operator-744455d44c-6w5k9"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.737404 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-v4qw8"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.742516 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gncj6\" (UniqueName: \"kubernetes.io/projected/c3d07ea4-a001-42fe-9405-7f9f95f5523f-kube-api-access-gncj6\") pod \"olm-operator-6b444d44fb-s796b\" (UID: \"c3d07ea4-a001-42fe-9405-7f9f95f5523f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.745547 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.758518 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.763278 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4grl\" (UniqueName: \"kubernetes.io/projected/2eebdd40-7f32-4d80-9a29-28373b288710-kube-api-access-n4grl\") pod \"service-ca-9c57cc56f-dkjqm\" (UID: \"2eebdd40-7f32-4d80-9a29-28373b288710\") " pod="openshift-service-ca/service-ca-9c57cc56f-dkjqm"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.784972 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9ed0f3fb-346c-4409-8d05-6286b8151dd2-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-69nvv\" (UID: \"9ed0f3fb-346c-4409-8d05-6286b8151dd2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-69nvv"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.786715 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.796336 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-6w5k9"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.810661 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.816040 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.817517 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.828047 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.843661 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.849776 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-audit-policies\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.851644 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-69nvv"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.869651 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.874369 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-dkjqm"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.884021 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.888691 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.890719 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.892319 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv"]
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.892355 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kt224\" (UniqueName: \"kubernetes.io/projected/4ed440a8-a3c7-48da-9811-bcc77750303a-kube-api-access-kt224\") pod \"machine-approver-56656f9798-47szc\" (UID: \"4ed440a8-a3c7-48da-9811-bcc77750303a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.906049 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.916849 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-7tl7m"]
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.918551 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-service-ca-bundle\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb"
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.923985 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Dec 04 15:05:01 crc kubenswrapper[4946]: E1204 15:05:01.930527 4946 projected.go:194] Error preparing data for projected volume kube-api-access-kgh74 for pod openshift-authentication-operator/authentication-operator-69f744f599-5fshb: failed to sync configmap cache: timed out waiting for the condition
Dec 04 15:05:01 crc kubenswrapper[4946]: E1204 15:05:01.930672 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-kube-api-access-kgh74 podName:cf2f88d9-df9c-4d4a-a1a1-6ab46612186e nodeName:}" failed. No retries permitted until 2025-12-04 15:05:02.430638335 +0000 UTC m=+153.316682146 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-kgh74" (UniqueName: "kubernetes.io/projected/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-kube-api-access-kgh74") pod "authentication-operator-69f744f599-5fshb" (UID: "cf2f88d9-df9c-4d4a-a1a1-6ab46612186e") : failed to sync configmap cache: timed out waiting for the condition
Dec 04 15:05:01 crc kubenswrapper[4946]: E1204 15:05:01.938754 4946 projected.go:288] Couldn't get configMap openshift-machine-api/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.944973 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Dec 04 15:05:01 crc kubenswrapper[4946]: W1204 15:05:01.950137 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod42d67fa8_e84e_4a09_a51d_c63365c274c5.slice/crio-c2be90aadc4b9d3105ea4729c289f24a00738e4b5be6be3c8ad2ec07035a96a0 WatchSource:0}: Error finding container c2be90aadc4b9d3105ea4729c289f24a00738e4b5be6be3c8ad2ec07035a96a0: Status 404 returned error can't find the container with id c2be90aadc4b9d3105ea4729c289f24a00738e4b5be6be3c8ad2ec07035a96a0
Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.979617 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Dec 04 15:05:01 crc kubenswrapper[4946]: E1204 15:05:01.980287 4946 projected.go:194] Error preparing data for projected volume kube-api-access-2s98w for pod openshift-machine-api/machine-api-operator-5694c8668f-7bzmc: failed to sync configmap cache: timed out waiting for the condition
Dec 04 15:05:01 crc kubenswrapper[4946]: E1204 15:05:01.980378 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/65dc1ade-ddd4-4a22-99bd-780112f318f9-kube-api-access-2s98w podName:65dc1ade-ddd4-4a22-99bd-780112f318f9 nodeName:}" failed. No retries permitted until 2025-12-04 15:05:02.480346088 +0000 UTC m=+153.366389729 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-2s98w" (UniqueName: "kubernetes.io/projected/65dc1ade-ddd4-4a22-99bd-780112f318f9-kube-api-access-2s98w") pod "machine-api-operator-5694c8668f-7bzmc" (UID: "65dc1ade-ddd4-4a22-99bd-780112f318f9") : failed to sync configmap cache: timed out waiting for the condition
Error: MountVolume.SetUp failed for volume "kube-api-access-2s98w" (UniqueName: "kubernetes.io/projected/65dc1ade-ddd4-4a22-99bd-780112f318f9-kube-api-access-2s98w") pod "machine-api-operator-5694c8668f-7bzmc" (UID: "65dc1ade-ddd4-4a22-99bd-780112f318f9") : failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:01 crc kubenswrapper[4946]: I1204 15:05:01.989161 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.007505 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.014810 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvblf\" (UniqueName: \"kubernetes.io/projected/4f02848c-6b07-4c72-8753-c34c4a3f210f-kube-api-access-tvblf\") pod \"openshift-apiserver-operator-796bbdcf4f-jfnsb\" (UID: \"4f02848c-6b07-4c72-8753-c34c4a3f210f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.025890 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.069529 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.079207 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/4ed440a8-a3c7-48da-9811-bcc77750303a-machine-approver-tls\") pod \"machine-approver-56656f9798-47szc\" (UID: \"4ed440a8-a3c7-48da-9811-bcc77750303a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.085689 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.100514 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/65dc1ade-ddd4-4a22-99bd-780112f318f9-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7bzmc\" (UID: \"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.111393 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.133498 4946 secret.go:188] Couldn't get secret openshift-authentication-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.133643 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-serving-cert podName:cf2f88d9-df9c-4d4a-a1a1-6ab46612186e nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.133608529 +0000 UTC m=+154.019652170 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-serving-cert") pod "authentication-operator-69f744f599-5fshb" (UID: "cf2f88d9-df9c-4d4a-a1a1-6ab46612186e") : failed to sync secret cache: timed out waiting for the condition Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.133828 4946 configmap.go:193] Couldn't get configMap openshift-machine-api/machine-api-operator-images: failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.133861 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/65dc1ade-ddd4-4a22-99bd-780112f318f9-images podName:65dc1ade-ddd4-4a22-99bd-780112f318f9 nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.133853957 +0000 UTC m=+154.019897598 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "images" (UniqueName: "kubernetes.io/configmap/65dc1ade-ddd4-4a22-99bd-780112f318f9-images") pod "machine-api-operator-5694c8668f-7bzmc" (UID: "65dc1ade-ddd4-4a22-99bd-780112f318f9") : failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.133885 4946 configmap.go:193] Couldn't get configMap openshift-authentication-operator/authentication-operator-config: failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.133910 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-config podName:cf2f88d9-df9c-4d4a-a1a1-6ab46612186e nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.133903489 +0000 UTC m=+154.019947130 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-config") pod "authentication-operator-69f744f599-5fshb" (UID: "cf2f88d9-df9c-4d4a-a1a1-6ab46612186e") : failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.133936 4946 configmap.go:193] Couldn't get configMap openshift-apiserver-operator/openshift-apiserver-operator-config: failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.133965 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4f02848c-6b07-4c72-8753-c34c4a3f210f-config podName:4f02848c-6b07-4c72-8753-c34c4a3f210f nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.13395772 +0000 UTC m=+154.020001361 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/4f02848c-6b07-4c72-8753-c34c4a3f210f-config") pod "openshift-apiserver-operator-796bbdcf4f-jfnsb" (UID: "4f02848c-6b07-4c72-8753-c34c4a3f210f") : failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.136530 4946 configmap.go:193] Couldn't get configMap openshift-machine-api/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.136649 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/65dc1ade-ddd4-4a22-99bd-780112f318f9-config podName:65dc1ade-ddd4-4a22-99bd-780112f318f9 nodeName:}" failed. 
No retries permitted until 2025-12-04 15:05:03.136620605 +0000 UTC m=+154.022664246 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/65dc1ade-ddd4-4a22-99bd-780112f318f9-config") pod "machine-api-operator-5694c8668f-7bzmc" (UID: "65dc1ade-ddd4-4a22-99bd-780112f318f9") : failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.141267 4946 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-router-certs: failed to sync secret cache: timed out waiting for the condition Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.141371 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-router-certs podName:02de3a18-59d7-48c0-bf9c-d40c09ed8cee nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.141347626 +0000 UTC m=+154.027391447 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "v4-0-config-system-router-certs" (UniqueName: "kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-router-certs") pod "oauth-openshift-558db77b4-cp7w9" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee") : failed to sync secret cache: timed out waiting for the condition Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.141449 4946 configmap.go:193] Couldn't get configMap openshift-cluster-machine-approver/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.141576 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4ed440a8-a3c7-48da-9811-bcc77750303a-auth-proxy-config podName:4ed440a8-a3c7-48da-9811-bcc77750303a nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.141528752 +0000 UTC m=+154.027572393 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "auth-proxy-config" (UniqueName: "kubernetes.io/configmap/4ed440a8-a3c7-48da-9811-bcc77750303a-auth-proxy-config") pod "machine-approver-56656f9798-47szc" (UID: "4ed440a8-a3c7-48da-9811-bcc77750303a") : failed to sync configmap cache: timed out waiting for the condition Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.141883 4946 secret.go:188] Couldn't get secret openshift-apiserver-operator/openshift-apiserver-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.141926 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4f02848c-6b07-4c72-8753-c34c4a3f210f-serving-cert podName:4f02848c-6b07-4c72-8753-c34c4a3f210f nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.141917334 +0000 UTC m=+154.027960975 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/4f02848c-6b07-4c72-8753-c34c4a3f210f-serving-cert") pod "openshift-apiserver-operator-796bbdcf4f-jfnsb" (UID: "4f02848c-6b07-4c72-8753-c34c4a3f210f") : failed to sync secret cache: timed out waiting for the condition Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.144434 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.144744 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.146987 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.155953 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb"] Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.163523 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bbd993f8-1caa-4fc1-9d39-a9524fe1e48b-metrics-tls\") pod \"ingress-operator-5b745b69d9-7gdsg\" (UID: \"bbd993f8-1caa-4fc1-9d39-a9524fe1e48b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.163574 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8skgd\" (UniqueName: \"kubernetes.io/projected/70d4f1c0-1b54-4178-a025-d95419215a08-kube-api-access-8skgd\") pod \"machine-config-server-qdrtz\" (UID: \"70d4f1c0-1b54-4178-a025-d95419215a08\") " pod="openshift-machine-config-operator/machine-config-server-qdrtz" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.163603 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/aca3dc1d-be1a-49f6-86e8-0addede99412-auth-proxy-config\") pod \"machine-config-operator-74547568cd-qhg9d\" (UID: \"aca3dc1d-be1a-49f6-86e8-0addede99412\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.163652 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7xq9\" (UniqueName: \"kubernetes.io/projected/90dd2744-a408-4164-bd61-88f44a4dc1ef-kube-api-access-r7xq9\") pod \"machine-config-controller-84d6567774-q85sw\" (UID: \"90dd2744-a408-4164-bd61-88f44a4dc1ef\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-q85sw" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.163681 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0ae46332-ca8f-4850-96bc-ca2d408b51d3-bound-sa-token\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 
15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.163700 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b24b049-2010-4a5d-813a-55a70bc07eaf-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-cjfn2\" (UID: \"5b24b049-2010-4a5d-813a-55a70bc07eaf\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cjfn2" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.163727 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/44d826fd-fefe-496b-9ee3-b6ea83d8227a-webhook-cert\") pod \"packageserver-d55dfcdfc-jvfnf\" (UID: \"44d826fd-fefe-496b-9ee3-b6ea83d8227a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.163746 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0ae46332-ca8f-4850-96bc-ca2d408b51d3-registry-tls\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.163766 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc2nr\" (UniqueName: \"kubernetes.io/projected/7f0be614-bbcc-46e2-b1ee-2944e087d3f4-kube-api-access-jc2nr\") pod \"migrator-59844c95c7-2bxm7\" (UID: \"7f0be614-bbcc-46e2-b1ee-2944e087d3f4\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2bxm7" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.163835 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b24b049-2010-4a5d-813a-55a70bc07eaf-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-cjfn2\" (UID: \"5b24b049-2010-4a5d-813a-55a70bc07eaf\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cjfn2" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.163898 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d1001856-7024-4079-b253-c66661c9e6ef-cert\") pod \"ingress-canary-9qzcp\" (UID: \"d1001856-7024-4079-b253-c66661c9e6ef\") " pod="openshift-ingress-canary/ingress-canary-9qzcp" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.164013 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djdz2\" (UniqueName: \"kubernetes.io/projected/aca3dc1d-be1a-49f6-86e8-0addede99412-kube-api-access-djdz2\") pod \"machine-config-operator-74547568cd-qhg9d\" (UID: \"aca3dc1d-be1a-49f6-86e8-0addede99412\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.164089 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/650a1e58-9737-4c8a-b9aa-5529ca970fa6-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-nbwkf\" (UID: \"650a1e58-9737-4c8a-b9aa-5529ca970fa6\") " 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nbwkf" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.164128 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/70d4f1c0-1b54-4178-a025-d95419215a08-node-bootstrap-token\") pod \"machine-config-server-qdrtz\" (UID: \"70d4f1c0-1b54-4178-a025-d95419215a08\") " pod="openshift-machine-config-operator/machine-config-server-qdrtz" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.164146 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/aca3dc1d-be1a-49f6-86e8-0addede99412-proxy-tls\") pod \"machine-config-operator-74547568cd-qhg9d\" (UID: \"aca3dc1d-be1a-49f6-86e8-0addede99412\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.164189 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0ae46332-ca8f-4850-96bc-ca2d408b51d3-installation-pull-secrets\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.164208 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/21f370a9-11c1-4b16-9610-ced611035357-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-gst2d\" (UID: \"21f370a9-11c1-4b16-9610-ced611035357\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.171940 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bbd993f8-1caa-4fc1-9d39-a9524fe1e48b-bound-sa-token\") pod \"ingress-operator-5b745b69d9-7gdsg\" (UID: \"bbd993f8-1caa-4fc1-9d39-a9524fe1e48b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.172011 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0d0d8ae-0f47-4929-855b-60798f0d6bd3-config\") pod \"kube-apiserver-operator-766d6c64bb-jw77k\" (UID: \"d0d0d8ae-0f47-4929-855b-60798f0d6bd3\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jw77k" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.172031 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.172062 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7v2bg\" (UniqueName: \"kubernetes.io/projected/be182b12-eeb7-4695-b7e4-247044da76cf-kube-api-access-7v2bg\") pod \"control-plane-machine-set-operator-78cbb6b69f-rvvcq\" (UID: \"be182b12-eeb7-4695-b7e4-247044da76cf\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rvvcq" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.172140 4946 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f64cw\" (UniqueName: \"kubernetes.io/projected/21f370a9-11c1-4b16-9610-ced611035357-kube-api-access-f64cw\") pod \"cluster-image-registry-operator-dc59b4c8b-gst2d\" (UID: \"21f370a9-11c1-4b16-9610-ced611035357\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.172172 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xqf6\" (UniqueName: \"kubernetes.io/projected/0ae46332-ca8f-4850-96bc-ca2d408b51d3-kube-api-access-7xqf6\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.172237 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7478l\" (UniqueName: \"kubernetes.io/projected/650a1e58-9737-4c8a-b9aa-5529ca970fa6-kube-api-access-7478l\") pod \"package-server-manager-789f6589d5-nbwkf\" (UID: \"650a1e58-9737-4c8a-b9aa-5529ca970fa6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nbwkf" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.172259 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bbd993f8-1caa-4fc1-9d39-a9524fe1e48b-trusted-ca\") pod \"ingress-operator-5b745b69d9-7gdsg\" (UID: \"bbd993f8-1caa-4fc1-9d39-a9524fe1e48b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.172337 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/90dd2744-a408-4164-bd61-88f44a4dc1ef-proxy-tls\") pod \"machine-config-controller-84d6567774-q85sw\" (UID: \"90dd2744-a408-4164-bd61-88f44a4dc1ef\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-q85sw" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.172359 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58cbv\" (UniqueName: \"kubernetes.io/projected/53433a4f-ccda-4c5c-9dca-7389ec6d741c-kube-api-access-58cbv\") pod \"downloads-7954f5f757-w56q9\" (UID: \"53433a4f-ccda-4c5c-9dca-7389ec6d741c\") " pod="openshift-console/downloads-7954f5f757-w56q9" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.172414 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/aca3dc1d-be1a-49f6-86e8-0addede99412-images\") pod \"machine-config-operator-74547568cd-qhg9d\" (UID: \"aca3dc1d-be1a-49f6-86e8-0addede99412\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.172475 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tj8fs\" (UniqueName: \"kubernetes.io/projected/5b24b049-2010-4a5d-813a-55a70bc07eaf-kube-api-access-tj8fs\") pod \"kube-storage-version-migrator-operator-b67b599dd-cjfn2\" (UID: \"5b24b049-2010-4a5d-813a-55a70bc07eaf\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cjfn2" Dec 04 
15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.172549 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0ae46332-ca8f-4850-96bc-ca2d408b51d3-registry-certificates\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.172572 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/44d826fd-fefe-496b-9ee3-b6ea83d8227a-apiservice-cert\") pod \"packageserver-d55dfcdfc-jvfnf\" (UID: \"44d826fd-fefe-496b-9ee3-b6ea83d8227a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.172642 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/90dd2744-a408-4164-bd61-88f44a4dc1ef-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-q85sw\" (UID: \"90dd2744-a408-4164-bd61-88f44a4dc1ef\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-q85sw" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.172711 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.172748 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/44d826fd-fefe-496b-9ee3-b6ea83d8227a-tmpfs\") pod \"packageserver-d55dfcdfc-jvfnf\" (UID: \"44d826fd-fefe-496b-9ee3-b6ea83d8227a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.172810 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0ae46332-ca8f-4850-96bc-ca2d408b51d3-ca-trust-extracted\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.173189 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:02.673171629 +0000 UTC m=+153.559215440 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.173290 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/21f370a9-11c1-4b16-9610-ced611035357-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-gst2d\" (UID: \"21f370a9-11c1-4b16-9610-ced611035357\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.173325 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d0d0d8ae-0f47-4929-855b-60798f0d6bd3-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jw77k\" (UID: \"d0d0d8ae-0f47-4929-855b-60798f0d6bd3\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jw77k" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.173348 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/70d4f1c0-1b54-4178-a025-d95419215a08-certs\") pod \"machine-config-server-qdrtz\" (UID: \"70d4f1c0-1b54-4178-a025-d95419215a08\") " pod="openshift-machine-config-operator/machine-config-server-qdrtz" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.173447 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0ae46332-ca8f-4850-96bc-ca2d408b51d3-trusted-ca\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.173541 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/be182b12-eeb7-4695-b7e4-247044da76cf-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-rvvcq\" (UID: \"be182b12-eeb7-4695-b7e4-247044da76cf\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rvvcq" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.173647 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m45sc\" (UniqueName: \"kubernetes.io/projected/44d826fd-fefe-496b-9ee3-b6ea83d8227a-kube-api-access-m45sc\") pod \"packageserver-d55dfcdfc-jvfnf\" (UID: \"44d826fd-fefe-496b-9ee3-b6ea83d8227a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.173705 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8"] Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.173742 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-sv46k\" (UniqueName: \"kubernetes.io/projected/d1001856-7024-4079-b253-c66661c9e6ef-kube-api-access-sv46k\") pod \"ingress-canary-9qzcp\" (UID: \"d1001856-7024-4079-b253-c66661c9e6ef\") " pod="openshift-ingress-canary/ingress-canary-9qzcp" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.173768 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d0d0d8ae-0f47-4929-855b-60798f0d6bd3-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jw77k\" (UID: \"d0d0d8ae-0f47-4929-855b-60798f0d6bd3\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jw77k" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.173809 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/21f370a9-11c1-4b16-9610-ced611035357-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-gst2d\" (UID: \"21f370a9-11c1-4b16-9610-ced611035357\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.173920 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxsl7\" (UniqueName: \"kubernetes.io/projected/bbd993f8-1caa-4fc1-9d39-a9524fe1e48b-kube-api-access-hxsl7\") pod \"ingress-operator-5b745b69d9-7gdsg\" (UID: \"bbd993f8-1caa-4fc1-9d39-a9524fe1e48b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.179452 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-v4qw8"] Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.185908 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.195967 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-6w5k9"] Dec 04 15:05:02 crc kubenswrapper[4946]: W1204 15:05:02.202059 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod12dc3c7b_da6c_46a0_b0c9_d0899e46837a.slice/crio-e2d891f4735f62c1b4f5c9a46d1a05bd7bfc9419885389feaa5b027a95708a3c WatchSource:0}: Error finding container e2d891f4735f62c1b4f5c9a46d1a05bd7bfc9419885389feaa5b027a95708a3c: Status 404 returned error can't find the container with id e2d891f4735f62c1b4f5c9a46d1a05bd7bfc9419885389feaa5b027a95708a3c Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.204104 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.226443 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.246497 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.266875 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.276350 
4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.276700 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdkqd\" (UniqueName: \"kubernetes.io/projected/1984eac2-4bb6-4512-b134-d8bf2588db46-kube-api-access-fdkqd\") pod \"openshift-controller-manager-operator-756b6f6bc6-fmpsb\" (UID: \"1984eac2-4bb6-4512-b134-d8bf2588db46\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmpsb" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.276750 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0ae46332-ca8f-4850-96bc-ca2d408b51d3-ca-trust-extracted\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.276780 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/488929dd-9d70-4b9f-b41b-40be79becc36-stats-auth\") pod \"router-default-5444994796-zzdp9\" (UID: \"488929dd-9d70-4b9f-b41b-40be79becc36\") " pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.276821 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/21f370a9-11c1-4b16-9610-ced611035357-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-gst2d\" (UID: \"21f370a9-11c1-4b16-9610-ced611035357\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.276855 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ca64a188-72ed-4efc-820f-507e4f6e1d35-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-vsskd\" (UID: \"ca64a188-72ed-4efc-820f-507e4f6e1d35\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vsskd" Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.276912 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:02.776872522 +0000 UTC m=+153.662916153 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.276979 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1984eac2-4bb6-4512-b134-d8bf2588db46-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-fmpsb\" (UID: \"1984eac2-4bb6-4512-b134-d8bf2588db46\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmpsb" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.277042 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d0d0d8ae-0f47-4929-855b-60798f0d6bd3-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jw77k\" (UID: \"d0d0d8ae-0f47-4929-855b-60798f0d6bd3\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jw77k" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.277070 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/70d4f1c0-1b54-4178-a025-d95419215a08-certs\") pod \"machine-config-server-qdrtz\" (UID: \"70d4f1c0-1b54-4178-a025-d95419215a08\") " pod="openshift-machine-config-operator/machine-config-server-qdrtz" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.277167 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0ae46332-ca8f-4850-96bc-ca2d408b51d3-trusted-ca\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.277202 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/3c6657f5-af17-443b-882d-3e345029eac5-csi-data-dir\") pod \"csi-hostpathplugin-wm6jt\" (UID: \"3c6657f5-af17-443b-882d-3e345029eac5\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.277222 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bf57c392-1dbe-4c73-96ea-98146b808571-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-fs4wh\" (UID: \"bf57c392-1dbe-4c73-96ea-98146b808571\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fs4wh" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.278620 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0ae46332-ca8f-4850-96bc-ca2d408b51d3-ca-trust-extracted\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.279156 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0ae46332-ca8f-4850-96bc-ca2d408b51d3-trusted-ca\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.279522 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/be182b12-eeb7-4695-b7e4-247044da76cf-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-rvvcq\" (UID: \"be182b12-eeb7-4695-b7e4-247044da76cf\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rvvcq" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.279592 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m45sc\" (UniqueName: \"kubernetes.io/projected/44d826fd-fefe-496b-9ee3-b6ea83d8227a-kube-api-access-m45sc\") pod \"packageserver-d55dfcdfc-jvfnf\" (UID: \"44d826fd-fefe-496b-9ee3-b6ea83d8227a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.279944 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sv46k\" (UniqueName: \"kubernetes.io/projected/d1001856-7024-4079-b253-c66661c9e6ef-kube-api-access-sv46k\") pod \"ingress-canary-9qzcp\" (UID: \"d1001856-7024-4079-b253-c66661c9e6ef\") " pod="openshift-ingress-canary/ingress-canary-9qzcp" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.279976 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d0d0d8ae-0f47-4929-855b-60798f0d6bd3-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jw77k\" (UID: \"d0d0d8ae-0f47-4929-855b-60798f0d6bd3\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jw77k" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.280283 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/21f370a9-11c1-4b16-9610-ced611035357-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-gst2d\" (UID: \"21f370a9-11c1-4b16-9610-ced611035357\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.280326 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxsl7\" (UniqueName: \"kubernetes.io/projected/bbd993f8-1caa-4fc1-9d39-a9524fe1e48b-kube-api-access-hxsl7\") pod \"ingress-operator-5b745b69d9-7gdsg\" (UID: \"bbd993f8-1caa-4fc1-9d39-a9524fe1e48b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.280370 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/18864978-0492-4497-913e-283bf542b579-available-featuregates\") pod \"openshift-config-operator-7777fb866f-xwt27\" (UID: \"18864978-0492-4497-913e-283bf542b579\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xwt27" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.280403 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/55893ee8-f0d7-4019-9522-45e8db696972-serving-cert\") pod \"etcd-operator-b45778765-lg2p8\" (UID: \"55893ee8-f0d7-4019-9522-45e8db696972\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.280433 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mhr4\" (UniqueName: \"kubernetes.io/projected/3c6657f5-af17-443b-882d-3e345029eac5-kube-api-access-9mhr4\") pod \"csi-hostpathplugin-wm6jt\" (UID: \"3c6657f5-af17-443b-882d-3e345029eac5\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.280484 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bbd993f8-1caa-4fc1-9d39-a9524fe1e48b-metrics-tls\") pod \"ingress-operator-5b745b69d9-7gdsg\" (UID: \"bbd993f8-1caa-4fc1-9d39-a9524fe1e48b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.280523 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8skgd\" (UniqueName: \"kubernetes.io/projected/70d4f1c0-1b54-4178-a025-d95419215a08-kube-api-access-8skgd\") pod \"machine-config-server-qdrtz\" (UID: \"70d4f1c0-1b54-4178-a025-d95419215a08\") " pod="openshift-machine-config-operator/machine-config-server-qdrtz" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.280555 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/aca3dc1d-be1a-49f6-86e8-0addede99412-auth-proxy-config\") pod \"machine-config-operator-74547568cd-qhg9d\" (UID: \"aca3dc1d-be1a-49f6-86e8-0addede99412\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.280589 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7xq9\" (UniqueName: \"kubernetes.io/projected/90dd2744-a408-4164-bd61-88f44a4dc1ef-kube-api-access-r7xq9\") pod \"machine-config-controller-84d6567774-q85sw\" (UID: \"90dd2744-a408-4164-bd61-88f44a4dc1ef\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-q85sw" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.280616 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/55893ee8-f0d7-4019-9522-45e8db696972-etcd-service-ca\") pod \"etcd-operator-b45778765-lg2p8\" (UID: \"55893ee8-f0d7-4019-9522-45e8db696972\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.280646 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/488929dd-9d70-4b9f-b41b-40be79becc36-metrics-certs\") pod \"router-default-5444994796-zzdp9\" (UID: \"488929dd-9d70-4b9f-b41b-40be79becc36\") " pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.280673 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kcnh\" (UniqueName: \"kubernetes.io/projected/447805f5-6492-4c42-95a5-ebfd9af1cf87-kube-api-access-8kcnh\") pod \"dns-default-v66dj\" 
(UID: \"447805f5-6492-4c42-95a5-ebfd9af1cf87\") " pod="openshift-dns/dns-default-v66dj" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.280708 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0ae46332-ca8f-4850-96bc-ca2d408b51d3-bound-sa-token\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.280736 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b24b049-2010-4a5d-813a-55a70bc07eaf-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-cjfn2\" (UID: \"5b24b049-2010-4a5d-813a-55a70bc07eaf\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cjfn2" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.281841 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/aca3dc1d-be1a-49f6-86e8-0addede99412-auth-proxy-config\") pod \"machine-config-operator-74547568cd-qhg9d\" (UID: \"aca3dc1d-be1a-49f6-86e8-0addede99412\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.281858 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/44d826fd-fefe-496b-9ee3-b6ea83d8227a-webhook-cert\") pod \"packageserver-d55dfcdfc-jvfnf\" (UID: \"44d826fd-fefe-496b-9ee3-b6ea83d8227a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.281974 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1984eac2-4bb6-4512-b134-d8bf2588db46-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-fmpsb\" (UID: \"1984eac2-4bb6-4512-b134-d8bf2588db46\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmpsb" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.282017 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/3c6657f5-af17-443b-882d-3e345029eac5-registration-dir\") pod \"csi-hostpathplugin-wm6jt\" (UID: \"3c6657f5-af17-443b-882d-3e345029eac5\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.282058 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0ae46332-ca8f-4850-96bc-ca2d408b51d3-registry-tls\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.282087 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc2nr\" (UniqueName: \"kubernetes.io/projected/7f0be614-bbcc-46e2-b1ee-2944e087d3f4-kube-api-access-jc2nr\") pod \"migrator-59844c95c7-2bxm7\" (UID: \"7f0be614-bbcc-46e2-b1ee-2944e087d3f4\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2bxm7" Dec 04 15:05:02 
crc kubenswrapper[4946]: I1204 15:05:02.282134 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdnqr\" (UniqueName: \"kubernetes.io/projected/488929dd-9d70-4b9f-b41b-40be79becc36-kube-api-access-gdnqr\") pod \"router-default-5444994796-zzdp9\" (UID: \"488929dd-9d70-4b9f-b41b-40be79becc36\") " pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.282163 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca64a188-72ed-4efc-820f-507e4f6e1d35-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-vsskd\" (UID: \"ca64a188-72ed-4efc-820f-507e4f6e1d35\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vsskd" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.282213 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b24b049-2010-4a5d-813a-55a70bc07eaf-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-cjfn2\" (UID: \"5b24b049-2010-4a5d-813a-55a70bc07eaf\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cjfn2" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.282296 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/488929dd-9d70-4b9f-b41b-40be79becc36-default-certificate\") pod \"router-default-5444994796-zzdp9\" (UID: \"488929dd-9d70-4b9f-b41b-40be79becc36\") " pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.282329 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/447805f5-6492-4c42-95a5-ebfd9af1cf87-metrics-tls\") pod \"dns-default-v66dj\" (UID: \"447805f5-6492-4c42-95a5-ebfd9af1cf87\") " pod="openshift-dns/dns-default-v66dj" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.282373 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d1001856-7024-4079-b253-c66661c9e6ef-cert\") pod \"ingress-canary-9qzcp\" (UID: \"d1001856-7024-4079-b253-c66661c9e6ef\") " pod="openshift-ingress-canary/ingress-canary-9qzcp" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.282400 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrk5d\" (UniqueName: \"kubernetes.io/projected/18864978-0492-4497-913e-283bf542b579-kube-api-access-jrk5d\") pod \"openshift-config-operator-7777fb866f-xwt27\" (UID: \"18864978-0492-4497-913e-283bf542b579\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xwt27" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.282427 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttf6v\" (UniqueName: \"kubernetes.io/projected/55893ee8-f0d7-4019-9522-45e8db696972-kube-api-access-ttf6v\") pod \"etcd-operator-b45778765-lg2p8\" (UID: \"55893ee8-f0d7-4019-9522-45e8db696972\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.282463 4946 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18864978-0492-4497-913e-283bf542b579-serving-cert\") pod \"openshift-config-operator-7777fb866f-xwt27\" (UID: \"18864978-0492-4497-913e-283bf542b579\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xwt27" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.282479 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d0d0d8ae-0f47-4929-855b-60798f0d6bd3-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jw77k\" (UID: \"d0d0d8ae-0f47-4929-855b-60798f0d6bd3\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jw77k" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.282514 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/3c6657f5-af17-443b-882d-3e345029eac5-socket-dir\") pod \"csi-hostpathplugin-wm6jt\" (UID: \"3c6657f5-af17-443b-882d-3e345029eac5\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.282594 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djdz2\" (UniqueName: \"kubernetes.io/projected/aca3dc1d-be1a-49f6-86e8-0addede99412-kube-api-access-djdz2\") pod \"machine-config-operator-74547568cd-qhg9d\" (UID: \"aca3dc1d-be1a-49f6-86e8-0addede99412\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.282679 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/650a1e58-9737-4c8a-b9aa-5529ca970fa6-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-nbwkf\" (UID: \"650a1e58-9737-4c8a-b9aa-5529ca970fa6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nbwkf" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.282190 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/70d4f1c0-1b54-4178-a025-d95419215a08-certs\") pod \"machine-config-server-qdrtz\" (UID: \"70d4f1c0-1b54-4178-a025-d95419215a08\") " pod="openshift-machine-config-operator/machine-config-server-qdrtz" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.283806 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bbd993f8-1caa-4fc1-9d39-a9524fe1e48b-metrics-tls\") pod \"ingress-operator-5b745b69d9-7gdsg\" (UID: \"bbd993f8-1caa-4fc1-9d39-a9524fe1e48b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.284685 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/21f370a9-11c1-4b16-9610-ced611035357-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-gst2d\" (UID: \"21f370a9-11c1-4b16-9610-ced611035357\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.285090 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 04 
15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.285946 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b24b049-2010-4a5d-813a-55a70bc07eaf-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-cjfn2\" (UID: \"5b24b049-2010-4a5d-813a-55a70bc07eaf\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cjfn2" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.286663 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b24b049-2010-4a5d-813a-55a70bc07eaf-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-cjfn2\" (UID: \"5b24b049-2010-4a5d-813a-55a70bc07eaf\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cjfn2" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.287246 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8d8084cb-4410-402a-a217-18dc2de50fd7-serving-cert\") pod \"service-ca-operator-777779d784-2bms7\" (UID: \"8d8084cb-4410-402a-a217-18dc2de50fd7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2bms7" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.287328 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pfqn\" (UniqueName: \"kubernetes.io/projected/8d8084cb-4410-402a-a217-18dc2de50fd7-kube-api-access-5pfqn\") pod \"service-ca-operator-777779d784-2bms7\" (UID: \"8d8084cb-4410-402a-a217-18dc2de50fd7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2bms7" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.287378 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/70d4f1c0-1b54-4178-a025-d95419215a08-node-bootstrap-token\") pod \"machine-config-server-qdrtz\" (UID: \"70d4f1c0-1b54-4178-a025-d95419215a08\") " pod="openshift-machine-config-operator/machine-config-server-qdrtz" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.287400 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/aca3dc1d-be1a-49f6-86e8-0addede99412-proxy-tls\") pod \"machine-config-operator-74547568cd-qhg9d\" (UID: \"aca3dc1d-be1a-49f6-86e8-0addede99412\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.287434 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0ae46332-ca8f-4850-96bc-ca2d408b51d3-installation-pull-secrets\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.287459 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/21f370a9-11c1-4b16-9610-ced611035357-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-gst2d\" (UID: \"21f370a9-11c1-4b16-9610-ced611035357\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d" Dec 04 15:05:02 crc 
kubenswrapper[4946]: I1204 15:05:02.287484 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bbd993f8-1caa-4fc1-9d39-a9524fe1e48b-bound-sa-token\") pod \"ingress-operator-5b745b69d9-7gdsg\" (UID: \"bbd993f8-1caa-4fc1-9d39-a9524fe1e48b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.287502 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0d0d8ae-0f47-4929-855b-60798f0d6bd3-config\") pod \"kube-apiserver-operator-766d6c64bb-jw77k\" (UID: \"d0d0d8ae-0f47-4929-855b-60798f0d6bd3\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jw77k" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.287734 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/650a1e58-9737-4c8a-b9aa-5529ca970fa6-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-nbwkf\" (UID: \"650a1e58-9737-4c8a-b9aa-5529ca970fa6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nbwkf" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.287947 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d1001856-7024-4079-b253-c66661c9e6ef-cert\") pod \"ingress-canary-9qzcp\" (UID: \"d1001856-7024-4079-b253-c66661c9e6ef\") " pod="openshift-ingress-canary/ingress-canary-9qzcp" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.290132 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0ae46332-ca8f-4850-96bc-ca2d408b51d3-registry-tls\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.287421 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/44d826fd-fefe-496b-9ee3-b6ea83d8227a-webhook-cert\") pod \"packageserver-d55dfcdfc-jvfnf\" (UID: \"44d826fd-fefe-496b-9ee3-b6ea83d8227a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.295507 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/be182b12-eeb7-4695-b7e4-247044da76cf-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-rvvcq\" (UID: \"be182b12-eeb7-4695-b7e4-247044da76cf\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rvvcq" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.300715 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.302410 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0d0d8ae-0f47-4929-855b-60798f0d6bd3-config\") pod \"kube-apiserver-operator-766d6c64bb-jw77k\" (UID: \"d0d0d8ae-0f47-4929-855b-60798f0d6bd3\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jw77k" Dec 04 15:05:02 crc 
kubenswrapper[4946]: I1204 15:05:02.303867 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-69nvv"] Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.304224 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7v2bg\" (UniqueName: \"kubernetes.io/projected/be182b12-eeb7-4695-b7e4-247044da76cf-kube-api-access-7v2bg\") pod \"control-plane-machine-set-operator-78cbb6b69f-rvvcq\" (UID: \"be182b12-eeb7-4695-b7e4-247044da76cf\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rvvcq" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.304544 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f64cw\" (UniqueName: \"kubernetes.io/projected/21f370a9-11c1-4b16-9610-ced611035357-kube-api-access-f64cw\") pod \"cluster-image-registry-operator-dc59b4c8b-gst2d\" (UID: \"21f370a9-11c1-4b16-9610-ced611035357\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.304600 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xqf6\" (UniqueName: \"kubernetes.io/projected/0ae46332-ca8f-4850-96bc-ca2d408b51d3-kube-api-access-7xqf6\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.304631 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7478l\" (UniqueName: \"kubernetes.io/projected/650a1e58-9737-4c8a-b9aa-5529ca970fa6-kube-api-access-7478l\") pod \"package-server-manager-789f6589d5-nbwkf\" (UID: \"650a1e58-9737-4c8a-b9aa-5529ca970fa6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nbwkf" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.304650 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bbd993f8-1caa-4fc1-9d39-a9524fe1e48b-trusted-ca\") pod \"ingress-operator-5b745b69d9-7gdsg\" (UID: \"bbd993f8-1caa-4fc1-9d39-a9524fe1e48b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.304675 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/55893ee8-f0d7-4019-9522-45e8db696972-etcd-client\") pod \"etcd-operator-b45778765-lg2p8\" (UID: \"55893ee8-f0d7-4019-9522-45e8db696972\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.304695 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/3c6657f5-af17-443b-882d-3e345029eac5-plugins-dir\") pod \"csi-hostpathplugin-wm6jt\" (UID: \"3c6657f5-af17-443b-882d-3e345029eac5\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.304811 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/55893ee8-f0d7-4019-9522-45e8db696972-etcd-ca\") pod \"etcd-operator-b45778765-lg2p8\" (UID: 
\"55893ee8-f0d7-4019-9522-45e8db696972\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.304831 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/3c6657f5-af17-443b-882d-3e345029eac5-mountpoint-dir\") pod \"csi-hostpathplugin-wm6jt\" (UID: \"3c6657f5-af17-443b-882d-3e345029eac5\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.304853 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d8084cb-4410-402a-a217-18dc2de50fd7-config\") pod \"service-ca-operator-777779d784-2bms7\" (UID: \"8d8084cb-4410-402a-a217-18dc2de50fd7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2bms7" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.304883 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzrn4\" (UniqueName: \"kubernetes.io/projected/bf57c392-1dbe-4c73-96ea-98146b808571-kube-api-access-wzrn4\") pod \"multus-admission-controller-857f4d67dd-fs4wh\" (UID: \"bf57c392-1dbe-4c73-96ea-98146b808571\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fs4wh" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.304881 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/21f370a9-11c1-4b16-9610-ced611035357-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-gst2d\" (UID: \"21f370a9-11c1-4b16-9610-ced611035357\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.310588 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/447805f5-6492-4c42-95a5-ebfd9af1cf87-config-volume\") pod \"dns-default-v66dj\" (UID: \"447805f5-6492-4c42-95a5-ebfd9af1cf87\") " pod="openshift-dns/dns-default-v66dj" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.311152 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/90dd2744-a408-4164-bd61-88f44a4dc1ef-proxy-tls\") pod \"machine-config-controller-84d6567774-q85sw\" (UID: \"90dd2744-a408-4164-bd61-88f44a4dc1ef\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-q85sw" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.311196 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58cbv\" (UniqueName: \"kubernetes.io/projected/53433a4f-ccda-4c5c-9dca-7389ec6d741c-kube-api-access-58cbv\") pod \"downloads-7954f5f757-w56q9\" (UID: \"53433a4f-ccda-4c5c-9dca-7389ec6d741c\") " pod="openshift-console/downloads-7954f5f757-w56q9" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.311374 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.312590 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/70d4f1c0-1b54-4178-a025-d95419215a08-node-bootstrap-token\") pod \"machine-config-server-qdrtz\" (UID: 
\"70d4f1c0-1b54-4178-a025-d95419215a08\") " pod="openshift-machine-config-operator/machine-config-server-qdrtz" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.313236 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/aca3dc1d-be1a-49f6-86e8-0addede99412-images\") pod \"machine-config-operator-74547568cd-qhg9d\" (UID: \"aca3dc1d-be1a-49f6-86e8-0addede99412\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.313342 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tj8fs\" (UniqueName: \"kubernetes.io/projected/5b24b049-2010-4a5d-813a-55a70bc07eaf-kube-api-access-tj8fs\") pod \"kube-storage-version-migrator-operator-b67b599dd-cjfn2\" (UID: \"5b24b049-2010-4a5d-813a-55a70bc07eaf\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cjfn2" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.313729 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/488929dd-9d70-4b9f-b41b-40be79becc36-service-ca-bundle\") pod \"router-default-5444994796-zzdp9\" (UID: \"488929dd-9d70-4b9f-b41b-40be79becc36\") " pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.313770 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0ae46332-ca8f-4850-96bc-ca2d408b51d3-registry-certificates\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.313801 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/44d826fd-fefe-496b-9ee3-b6ea83d8227a-apiservice-cert\") pod \"packageserver-d55dfcdfc-jvfnf\" (UID: \"44d826fd-fefe-496b-9ee3-b6ea83d8227a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.313842 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/90dd2744-a408-4164-bd61-88f44a4dc1ef-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-q85sw\" (UID: \"90dd2744-a408-4164-bd61-88f44a4dc1ef\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-q85sw" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.313868 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55893ee8-f0d7-4019-9522-45e8db696972-config\") pod \"etcd-operator-b45778765-lg2p8\" (UID: \"55893ee8-f0d7-4019-9522-45e8db696972\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.313903 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.313931 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/44d826fd-fefe-496b-9ee3-b6ea83d8227a-tmpfs\") pod \"packageserver-d55dfcdfc-jvfnf\" (UID: \"44d826fd-fefe-496b-9ee3-b6ea83d8227a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.313963 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca64a188-72ed-4efc-820f-507e4f6e1d35-config\") pod \"kube-controller-manager-operator-78b949d7b-vsskd\" (UID: \"ca64a188-72ed-4efc-820f-507e4f6e1d35\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vsskd" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.314838 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/90dd2744-a408-4164-bd61-88f44a4dc1ef-proxy-tls\") pod \"machine-config-controller-84d6567774-q85sw\" (UID: \"90dd2744-a408-4164-bd61-88f44a4dc1ef\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-q85sw" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.317231 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/90dd2744-a408-4164-bd61-88f44a4dc1ef-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-q85sw\" (UID: \"90dd2744-a408-4164-bd61-88f44a4dc1ef\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-q85sw" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.317521 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0ae46332-ca8f-4850-96bc-ca2d408b51d3-installation-pull-secrets\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.321133 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/44d826fd-fefe-496b-9ee3-b6ea83d8227a-apiservice-cert\") pod \"packageserver-d55dfcdfc-jvfnf\" (UID: \"44d826fd-fefe-496b-9ee3-b6ea83d8227a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.321503 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/44d826fd-fefe-496b-9ee3-b6ea83d8227a-tmpfs\") pod \"packageserver-d55dfcdfc-jvfnf\" (UID: \"44d826fd-fefe-496b-9ee3-b6ea83d8227a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.321990 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:02.821968808 +0000 UTC m=+153.708012449 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.323700 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bbd993f8-1caa-4fc1-9d39-a9524fe1e48b-trusted-ca\") pod \"ingress-operator-5b745b69d9-7gdsg\" (UID: \"bbd993f8-1caa-4fc1-9d39-a9524fe1e48b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.330146 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.344572 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 04 15:05:02 crc kubenswrapper[4946]: W1204 15:05:02.364677 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9ed0f3fb_346c_4409_8d05_6286b8151dd2.slice/crio-d7dba077bed7af264d5907e3b634d38acdac339a3d1d630c073e71dbaae6b53c WatchSource:0}: Error finding container d7dba077bed7af264d5907e3b634d38acdac339a3d1d630c073e71dbaae6b53c: Status 404 returned error can't find the container with id d7dba077bed7af264d5907e3b634d38acdac339a3d1d630c073e71dbaae6b53c Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.379511 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-dkjqm"] Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.391736 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" event={"ID":"93885d87-d68e-4445-abd6-ece851137b17","Type":"ContainerStarted","Data":"94005a1343f7908b304644fadf0c4c13fab36e6b6933b63dfb088ae1aa4162cc"} Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.393089 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.394100 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-69nvv" event={"ID":"9ed0f3fb-346c-4409-8d05-6286b8151dd2","Type":"ContainerStarted","Data":"d7dba077bed7af264d5907e3b634d38acdac339a3d1d630c073e71dbaae6b53c"} Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.395503 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-v4qw8" event={"ID":"12dc3c7b-da6c-46a0-b0c9-d0899e46837a","Type":"ContainerStarted","Data":"e2d891f4735f62c1b4f5c9a46d1a05bd7bfc9419885389feaa5b027a95708a3c"} Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.397776 4946 generic.go:334] "Generic (PLEG): container finished" podID="d0437519-c01e-4b89-a007-8fda5902ea9f" containerID="457c27b9598bed31df9d485bbc9f6e45b4c976385554355417d259f58b1676ff" exitCode=0 Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.397837 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" event={"ID":"d0437519-c01e-4b89-a007-8fda5902ea9f","Type":"ContainerDied","Data":"457c27b9598bed31df9d485bbc9f6e45b4c976385554355417d259f58b1676ff"} Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.400029 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b"] Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.407642 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m45sc\" (UniqueName: \"kubernetes.io/projected/44d826fd-fefe-496b-9ee3-b6ea83d8227a-kube-api-access-m45sc\") pod \"packageserver-d55dfcdfc-jvfnf\" (UID: \"44d826fd-fefe-496b-9ee3-b6ea83d8227a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.417058 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.417307 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:02.917272994 +0000 UTC m=+153.803316635 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.417593 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzrn4\" (UniqueName: \"kubernetes.io/projected/bf57c392-1dbe-4c73-96ea-98146b808571-kube-api-access-wzrn4\") pod \"multus-admission-controller-857f4d67dd-fs4wh\" (UID: \"bf57c392-1dbe-4c73-96ea-98146b808571\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fs4wh" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.417709 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/447805f5-6492-4c42-95a5-ebfd9af1cf87-config-volume\") pod \"dns-default-v66dj\" (UID: \"447805f5-6492-4c42-95a5-ebfd9af1cf87\") " pod="openshift-dns/dns-default-v66dj" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.417876 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/488929dd-9d70-4b9f-b41b-40be79becc36-service-ca-bundle\") pod \"router-default-5444994796-zzdp9\" (UID: \"488929dd-9d70-4b9f-b41b-40be79becc36\") " pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.418015 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55893ee8-f0d7-4019-9522-45e8db696972-config\") pod 
\"etcd-operator-b45778765-lg2p8\" (UID: \"55893ee8-f0d7-4019-9522-45e8db696972\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.418145 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.418245 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca64a188-72ed-4efc-820f-507e4f6e1d35-config\") pod \"kube-controller-manager-operator-78b949d7b-vsskd\" (UID: \"ca64a188-72ed-4efc-820f-507e4f6e1d35\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vsskd" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.418342 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdkqd\" (UniqueName: \"kubernetes.io/projected/1984eac2-4bb6-4512-b134-d8bf2588db46-kube-api-access-fdkqd\") pod \"openshift-controller-manager-operator-756b6f6bc6-fmpsb\" (UID: \"1984eac2-4bb6-4512-b134-d8bf2588db46\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmpsb" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.418434 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/488929dd-9d70-4b9f-b41b-40be79becc36-stats-auth\") pod \"router-default-5444994796-zzdp9\" (UID: \"488929dd-9d70-4b9f-b41b-40be79becc36\") " pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.418556 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ca64a188-72ed-4efc-820f-507e4f6e1d35-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-vsskd\" (UID: \"ca64a188-72ed-4efc-820f-507e4f6e1d35\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vsskd" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.418662 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1984eac2-4bb6-4512-b134-d8bf2588db46-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-fmpsb\" (UID: \"1984eac2-4bb6-4512-b134-d8bf2588db46\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmpsb" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.418770 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/3c6657f5-af17-443b-882d-3e345029eac5-csi-data-dir\") pod \"csi-hostpathplugin-wm6jt\" (UID: \"3c6657f5-af17-443b-882d-3e345029eac5\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.418871 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bf57c392-1dbe-4c73-96ea-98146b808571-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-fs4wh\" (UID: 
\"bf57c392-1dbe-4c73-96ea-98146b808571\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fs4wh" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.419020 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/18864978-0492-4497-913e-283bf542b579-available-featuregates\") pod \"openshift-config-operator-7777fb866f-xwt27\" (UID: \"18864978-0492-4497-913e-283bf542b579\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xwt27" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.419172 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/55893ee8-f0d7-4019-9522-45e8db696972-serving-cert\") pod \"etcd-operator-b45778765-lg2p8\" (UID: \"55893ee8-f0d7-4019-9522-45e8db696972\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.419273 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mhr4\" (UniqueName: \"kubernetes.io/projected/3c6657f5-af17-443b-882d-3e345029eac5-kube-api-access-9mhr4\") pod \"csi-hostpathplugin-wm6jt\" (UID: \"3c6657f5-af17-443b-882d-3e345029eac5\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.419562 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/55893ee8-f0d7-4019-9522-45e8db696972-etcd-service-ca\") pod \"etcd-operator-b45778765-lg2p8\" (UID: \"55893ee8-f0d7-4019-9522-45e8db696972\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.419698 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/488929dd-9d70-4b9f-b41b-40be79becc36-metrics-certs\") pod \"router-default-5444994796-zzdp9\" (UID: \"488929dd-9d70-4b9f-b41b-40be79becc36\") " pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.420380 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kcnh\" (UniqueName: \"kubernetes.io/projected/447805f5-6492-4c42-95a5-ebfd9af1cf87-kube-api-access-8kcnh\") pod \"dns-default-v66dj\" (UID: \"447805f5-6492-4c42-95a5-ebfd9af1cf87\") " pod="openshift-dns/dns-default-v66dj" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.420554 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1984eac2-4bb6-4512-b134-d8bf2588db46-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-fmpsb\" (UID: \"1984eac2-4bb6-4512-b134-d8bf2588db46\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmpsb" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.420657 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/3c6657f5-af17-443b-882d-3e345029eac5-registration-dir\") pod \"csi-hostpathplugin-wm6jt\" (UID: \"3c6657f5-af17-443b-882d-3e345029eac5\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.420781 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-gdnqr\" (UniqueName: \"kubernetes.io/projected/488929dd-9d70-4b9f-b41b-40be79becc36-kube-api-access-gdnqr\") pod \"router-default-5444994796-zzdp9\" (UID: \"488929dd-9d70-4b9f-b41b-40be79becc36\") " pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.420888 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca64a188-72ed-4efc-820f-507e4f6e1d35-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-vsskd\" (UID: \"ca64a188-72ed-4efc-820f-507e4f6e1d35\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vsskd" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.420962 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/488929dd-9d70-4b9f-b41b-40be79becc36-service-ca-bundle\") pod \"router-default-5444994796-zzdp9\" (UID: \"488929dd-9d70-4b9f-b41b-40be79becc36\") " pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.421194 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" event={"ID":"1f998c86-99a4-4416-b810-b40a8fb1775f","Type":"ContainerStarted","Data":"7a3add17eb2833c5217722896513f5427b4dfd76abcb2c29f862310da944c245"} Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.423464 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.421988 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:02.921967403 +0000 UTC m=+153.808011224 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.423774 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/447805f5-6492-4c42-95a5-ebfd9af1cf87-metrics-tls\") pod \"dns-default-v66dj\" (UID: \"447805f5-6492-4c42-95a5-ebfd9af1cf87\") " pod="openshift-dns/dns-default-v66dj" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.423904 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrk5d\" (UniqueName: \"kubernetes.io/projected/18864978-0492-4497-913e-283bf542b579-kube-api-access-jrk5d\") pod \"openshift-config-operator-7777fb866f-xwt27\" (UID: \"18864978-0492-4497-913e-283bf542b579\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xwt27" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.424003 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttf6v\" (UniqueName: \"kubernetes.io/projected/55893ee8-f0d7-4019-9522-45e8db696972-kube-api-access-ttf6v\") pod \"etcd-operator-b45778765-lg2p8\" (UID: \"55893ee8-f0d7-4019-9522-45e8db696972\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.424096 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/488929dd-9d70-4b9f-b41b-40be79becc36-default-certificate\") pod \"router-default-5444994796-zzdp9\" (UID: \"488929dd-9d70-4b9f-b41b-40be79becc36\") " pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.424214 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18864978-0492-4497-913e-283bf542b579-serving-cert\") pod \"openshift-config-operator-7777fb866f-xwt27\" (UID: \"18864978-0492-4497-913e-283bf542b579\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xwt27" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.424353 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/3c6657f5-af17-443b-882d-3e345029eac5-socket-dir\") pod \"csi-hostpathplugin-wm6jt\" (UID: \"3c6657f5-af17-443b-882d-3e345029eac5\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.424764 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8d8084cb-4410-402a-a217-18dc2de50fd7-serving-cert\") pod \"service-ca-operator-777779d784-2bms7\" (UID: \"8d8084cb-4410-402a-a217-18dc2de50fd7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2bms7" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.424937 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pfqn\" (UniqueName: 
\"kubernetes.io/projected/8d8084cb-4410-402a-a217-18dc2de50fd7-kube-api-access-5pfqn\") pod \"service-ca-operator-777779d784-2bms7\" (UID: \"8d8084cb-4410-402a-a217-18dc2de50fd7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2bms7" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.425254 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/55893ee8-f0d7-4019-9522-45e8db696972-etcd-client\") pod \"etcd-operator-b45778765-lg2p8\" (UID: \"55893ee8-f0d7-4019-9522-45e8db696972\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.425384 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/3c6657f5-af17-443b-882d-3e345029eac5-plugins-dir\") pod \"csi-hostpathplugin-wm6jt\" (UID: \"3c6657f5-af17-443b-882d-3e345029eac5\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.425512 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/55893ee8-f0d7-4019-9522-45e8db696972-etcd-ca\") pod \"etcd-operator-b45778765-lg2p8\" (UID: \"55893ee8-f0d7-4019-9522-45e8db696972\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.425627 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/3c6657f5-af17-443b-882d-3e345029eac5-mountpoint-dir\") pod \"csi-hostpathplugin-wm6jt\" (UID: \"3c6657f5-af17-443b-882d-3e345029eac5\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.425729 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d8084cb-4410-402a-a217-18dc2de50fd7-config\") pod \"service-ca-operator-777779d784-2bms7\" (UID: \"8d8084cb-4410-402a-a217-18dc2de50fd7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2bms7" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.426631 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d8084cb-4410-402a-a217-18dc2de50fd7-config\") pod \"service-ca-operator-777779d784-2bms7\" (UID: \"8d8084cb-4410-402a-a217-18dc2de50fd7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2bms7" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.426882 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/3c6657f5-af17-443b-882d-3e345029eac5-csi-data-dir\") pod \"csi-hostpathplugin-wm6jt\" (UID: \"3c6657f5-af17-443b-882d-3e345029eac5\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.427477 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-6w5k9" event={"ID":"3539ee4e-f397-45e0-b449-93b150766448","Type":"ContainerStarted","Data":"b750f95b8059f59409b78af9eb9ab27edf787cee444a0a2a7f8bee90ce8787d8"} Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.427707 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 
15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.428129 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/18864978-0492-4497-913e-283bf542b579-available-featuregates\") pod \"openshift-config-operator-7777fb866f-xwt27\" (UID: \"18864978-0492-4497-913e-283bf542b579\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xwt27" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.432006 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8d8084cb-4410-402a-a217-18dc2de50fd7-serving-cert\") pod \"service-ca-operator-777779d784-2bms7\" (UID: \"8d8084cb-4410-402a-a217-18dc2de50fd7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2bms7" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.433515 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sv46k\" (UniqueName: \"kubernetes.io/projected/d1001856-7024-4079-b253-c66661c9e6ef-kube-api-access-sv46k\") pod \"ingress-canary-9qzcp\" (UID: \"d1001856-7024-4079-b253-c66661c9e6ef\") " pod="openshift-ingress-canary/ingress-canary-9qzcp" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.434508 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bf57c392-1dbe-4c73-96ea-98146b808571-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-fs4wh\" (UID: \"bf57c392-1dbe-4c73-96ea-98146b808571\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fs4wh" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.435015 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/3c6657f5-af17-443b-882d-3e345029eac5-socket-dir\") pod \"csi-hostpathplugin-wm6jt\" (UID: \"3c6657f5-af17-443b-882d-3e345029eac5\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.434752 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18864978-0492-4497-913e-283bf542b579-serving-cert\") pod \"openshift-config-operator-7777fb866f-xwt27\" (UID: \"18864978-0492-4497-913e-283bf542b579\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xwt27" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.434848 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/3c6657f5-af17-443b-882d-3e345029eac5-mountpoint-dir\") pod \"csi-hostpathplugin-wm6jt\" (UID: \"3c6657f5-af17-443b-882d-3e345029eac5\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.434729 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/447805f5-6492-4c42-95a5-ebfd9af1cf87-metrics-tls\") pod \"dns-default-v66dj\" (UID: \"447805f5-6492-4c42-95a5-ebfd9af1cf87\") " pod="openshift-dns/dns-default-v66dj" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.435385 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/3c6657f5-af17-443b-882d-3e345029eac5-plugins-dir\") pod \"csi-hostpathplugin-wm6jt\" (UID: \"3c6657f5-af17-443b-882d-3e345029eac5\") " 
pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.420238 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/447805f5-6492-4c42-95a5-ebfd9af1cf87-config-volume\") pod \"dns-default-v66dj\" (UID: \"447805f5-6492-4c42-95a5-ebfd9af1cf87\") " pod="openshift-dns/dns-default-v66dj" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.435954 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8" event={"ID":"fb5b0fbe-569f-4edb-a5da-c1d37eec5981","Type":"ContainerStarted","Data":"2f4be03fe61300ce1730a0088eb1f61493a9b5503a6d859e5676f756a956ed49"} Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.421620 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55893ee8-f0d7-4019-9522-45e8db696972-config\") pod \"etcd-operator-b45778765-lg2p8\" (UID: \"55893ee8-f0d7-4019-9522-45e8db696972\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.436460 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/3c6657f5-af17-443b-882d-3e345029eac5-registration-dir\") pod \"csi-hostpathplugin-wm6jt\" (UID: \"3c6657f5-af17-443b-882d-3e345029eac5\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.422873 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca64a188-72ed-4efc-820f-507e4f6e1d35-config\") pod \"kube-controller-manager-operator-78b949d7b-vsskd\" (UID: \"ca64a188-72ed-4efc-820f-507e4f6e1d35\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vsskd" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.439186 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-w8nz2" event={"ID":"0cd92273-2253-49c3-9e31-0da3e687c206","Type":"ContainerStarted","Data":"0a3805387a9429003d5c17467fcf237c73ee4d45f0af3a5187e43ceb138bbea9"} Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.439343 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-w8nz2" event={"ID":"0cd92273-2253-49c3-9e31-0da3e687c206","Type":"ContainerStarted","Data":"3bec1397e7212e512baed3154db802e824078ccd81f3c4762f89ae6c1b591852"} Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.440562 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/488929dd-9d70-4b9f-b41b-40be79becc36-stats-auth\") pod \"router-default-5444994796-zzdp9\" (UID: \"488929dd-9d70-4b9f-b41b-40be79becc36\") " pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.440782 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/488929dd-9d70-4b9f-b41b-40be79becc36-metrics-certs\") pod \"router-default-5444994796-zzdp9\" (UID: \"488929dd-9d70-4b9f-b41b-40be79becc36\") " pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.440851 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-console-operator/console-operator-58897d9998-w8nz2" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.441455 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb" event={"ID":"9c28e21c-79cb-4fe0-b8f3-247fbce0640c","Type":"ContainerStarted","Data":"1e3838e41f91698e22c4bc84f3994d4ebb343f1783ad111b6e5993234b5b6903"} Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.442749 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1984eac2-4bb6-4512-b134-d8bf2588db46-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-fmpsb\" (UID: \"1984eac2-4bb6-4512-b134-d8bf2588db46\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmpsb" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.447589 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d0d0d8ae-0f47-4929-855b-60798f0d6bd3-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jw77k\" (UID: \"d0d0d8ae-0f47-4929-855b-60798f0d6bd3\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jw77k" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.447593 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-z4t8l"] Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.448373 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/488929dd-9d70-4b9f-b41b-40be79becc36-default-certificate\") pod \"router-default-5444994796-zzdp9\" (UID: \"488929dd-9d70-4b9f-b41b-40be79becc36\") " pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.450547 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" event={"ID":"42d67fa8-e84e-4a09-a51d-c63365c274c5","Type":"ContainerStarted","Data":"c2be90aadc4b9d3105ea4729c289f24a00738e4b5be6be3c8ad2ec07035a96a0"} Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.450747 4946 patch_prober.go:28] interesting pod/console-operator-58897d9998-w8nz2 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.450794 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-w8nz2" podUID="0cd92273-2253-49c3-9e31-0da3e687c206" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.479013 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxsl7\" (UniqueName: \"kubernetes.io/projected/bbd993f8-1caa-4fc1-9d39-a9524fe1e48b-kube-api-access-hxsl7\") pod \"ingress-operator-5b745b69d9-7gdsg\" (UID: \"bbd993f8-1caa-4fc1-9d39-a9524fe1e48b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.517809 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/0ae46332-ca8f-4850-96bc-ca2d408b51d3-bound-sa-token\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.526491 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.526711 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.026681588 +0000 UTC m=+153.912725239 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.527377 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.527447 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgh74\" (UniqueName: \"kubernetes.io/projected/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-kube-api-access-kgh74\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.527532 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2s98w\" (UniqueName: \"kubernetes.io/projected/65dc1ade-ddd4-4a22-99bd-780112f318f9-kube-api-access-2s98w\") pod \"machine-api-operator-5694c8668f-7bzmc\" (UID: \"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.530746 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.030736858 +0000 UTC m=+153.916780499 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.534347 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2s98w\" (UniqueName: \"kubernetes.io/projected/65dc1ade-ddd4-4a22-99bd-780112f318f9-kube-api-access-2s98w\") pod \"machine-api-operator-5694c8668f-7bzmc\" (UID: \"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.536631 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgh74\" (UniqueName: \"kubernetes.io/projected/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-kube-api-access-kgh74\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.539130 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7xq9\" (UniqueName: \"kubernetes.io/projected/90dd2744-a408-4164-bd61-88f44a4dc1ef-kube-api-access-r7xq9\") pod \"machine-config-controller-84d6567774-q85sw\" (UID: \"90dd2744-a408-4164-bd61-88f44a4dc1ef\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-q85sw" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.558854 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc2nr\" (UniqueName: \"kubernetes.io/projected/7f0be614-bbcc-46e2-b1ee-2944e087d3f4-kube-api-access-jc2nr\") pod \"migrator-59844c95c7-2bxm7\" (UID: \"7f0be614-bbcc-46e2-b1ee-2944e087d3f4\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2bxm7" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.580186 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djdz2\" (UniqueName: \"kubernetes.io/projected/aca3dc1d-be1a-49f6-86e8-0addede99412-kube-api-access-djdz2\") pod \"machine-config-operator-74547568cd-qhg9d\" (UID: \"aca3dc1d-be1a-49f6-86e8-0addede99412\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.600285 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bbd993f8-1caa-4fc1-9d39-a9524fe1e48b-bound-sa-token\") pod \"ingress-operator-5b745b69d9-7gdsg\" (UID: \"bbd993f8-1caa-4fc1-9d39-a9524fe1e48b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.623087 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7v2bg\" (UniqueName: \"kubernetes.io/projected/be182b12-eeb7-4695-b7e4-247044da76cf-kube-api-access-7v2bg\") pod \"control-plane-machine-set-operator-78cbb6b69f-rvvcq\" (UID: \"be182b12-eeb7-4695-b7e4-247044da76cf\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rvvcq" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 
15:05:02.628468 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.628673 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.128619335 +0000 UTC m=+154.014662986 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.628821 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.629274 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.129263236 +0000 UTC m=+154.015306877 (durationBeforeRetry 500ms). 
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.635885 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/55893ee8-f0d7-4019-9522-45e8db696972-serving-cert\") pod \"etcd-operator-b45778765-lg2p8\" (UID: \"55893ee8-f0d7-4019-9522-45e8db696972\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.635984 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/55893ee8-f0d7-4019-9522-45e8db696972-etcd-ca\") pod \"etcd-operator-b45778765-lg2p8\" (UID: \"55893ee8-f0d7-4019-9522-45e8db696972\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.636349 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/55893ee8-f0d7-4019-9522-45e8db696972-etcd-client\") pod \"etcd-operator-b45778765-lg2p8\" (UID: \"55893ee8-f0d7-4019-9522-45e8db696972\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.636777 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/aca3dc1d-be1a-49f6-86e8-0addede99412-proxy-tls\") pod \"machine-config-operator-74547568cd-qhg9d\" (UID: \"aca3dc1d-be1a-49f6-86e8-0addede99412\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.637553 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/aca3dc1d-be1a-49f6-86e8-0addede99412-images\") pod \"machine-config-operator-74547568cd-qhg9d\" (UID: \"aca3dc1d-be1a-49f6-86e8-0addede99412\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.638154 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/55893ee8-f0d7-4019-9522-45e8db696972-etcd-service-ca\") pod \"etcd-operator-b45778765-lg2p8\" (UID: \"55893ee8-f0d7-4019-9522-45e8db696972\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.638317 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/21f370a9-11c1-4b16-9610-ced611035357-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-gst2d\" (UID: \"21f370a9-11c1-4b16-9610-ced611035357\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.638487 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1984eac2-4bb6-4512-b134-d8bf2588db46-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-fmpsb\" (UID: \"1984eac2-4bb6-4512-b134-d8bf2588db46\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmpsb"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.640163 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0ae46332-ca8f-4850-96bc-ca2d408b51d3-registry-certificates\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.642071 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca64a188-72ed-4efc-820f-507e4f6e1d35-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-vsskd\" (UID: \"ca64a188-72ed-4efc-820f-507e4f6e1d35\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vsskd"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.643348 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f64cw\" (UniqueName: \"kubernetes.io/projected/21f370a9-11c1-4b16-9610-ced611035357-kube-api-access-f64cw\") pod \"cluster-image-registry-operator-dc59b4c8b-gst2d\" (UID: \"21f370a9-11c1-4b16-9610-ced611035357\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.647442 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8skgd\" (UniqueName: \"kubernetes.io/projected/70d4f1c0-1b54-4178-a025-d95419215a08-kube-api-access-8skgd\") pod \"machine-config-server-qdrtz\" (UID: \"70d4f1c0-1b54-4178-a025-d95419215a08\") " pod="openshift-machine-config-operator/machine-config-server-qdrtz"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.666322 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.671378 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.682438 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.695204 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xqf6\" (UniqueName: \"kubernetes.io/projected/0ae46332-ca8f-4850-96bc-ca2d408b51d3-kube-api-access-7xqf6\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.705686 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58cbv\" (UniqueName: \"kubernetes.io/projected/53433a4f-ccda-4c5c-9dca-7389ec6d741c-kube-api-access-58cbv\") pod \"downloads-7954f5f757-w56q9\" (UID: \"53433a4f-ccda-4c5c-9dca-7389ec6d741c\") " pod="openshift-console/downloads-7954f5f757-w56q9"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.712471 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9qzcp"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.724260 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tj8fs\" (UniqueName: \"kubernetes.io/projected/5b24b049-2010-4a5d-813a-55a70bc07eaf-kube-api-access-tj8fs\") pod \"kube-storage-version-migrator-operator-b67b599dd-cjfn2\" (UID: \"5b24b049-2010-4a5d-813a-55a70bc07eaf\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cjfn2"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.728355 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jw77k"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.729745 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.730329 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.230304734 +0000 UTC m=+154.116348375 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.736063 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-qdrtz"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.745521 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-q85sw"
Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.761680 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2bxm7" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.770130 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdkqd\" (UniqueName: \"kubernetes.io/projected/1984eac2-4bb6-4512-b134-d8bf2588db46-kube-api-access-fdkqd\") pod \"openshift-controller-manager-operator-756b6f6bc6-fmpsb\" (UID: \"1984eac2-4bb6-4512-b134-d8bf2588db46\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmpsb" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.770934 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7478l\" (UniqueName: \"kubernetes.io/projected/650a1e58-9737-4c8a-b9aa-5529ca970fa6-kube-api-access-7478l\") pod \"package-server-manager-789f6589d5-nbwkf\" (UID: \"650a1e58-9737-4c8a-b9aa-5529ca970fa6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nbwkf" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.772827 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.783523 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.789497 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzrn4\" (UniqueName: \"kubernetes.io/projected/bf57c392-1dbe-4c73-96ea-98146b808571-kube-api-access-wzrn4\") pod \"multus-admission-controller-857f4d67dd-fs4wh\" (UID: \"bf57c392-1dbe-4c73-96ea-98146b808571\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fs4wh" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.804543 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rvvcq" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.805174 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nbwkf" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.812826 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mhr4\" (UniqueName: \"kubernetes.io/projected/3c6657f5-af17-443b-882d-3e345029eac5-kube-api-access-9mhr4\") pod \"csi-hostpathplugin-wm6jt\" (UID: \"3c6657f5-af17-443b-882d-3e345029eac5\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.824445 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmpsb" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.832436 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.833066 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.333049766 +0000 UTC m=+154.219093397 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.848297 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-fs4wh" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.848788 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ca64a188-72ed-4efc-820f-507e4f6e1d35-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-vsskd\" (UID: \"ca64a188-72ed-4efc-820f-507e4f6e1d35\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vsskd" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.854695 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pfqn\" (UniqueName: \"kubernetes.io/projected/8d8084cb-4410-402a-a217-18dc2de50fd7-kube-api-access-5pfqn\") pod \"service-ca-operator-777779d784-2bms7\" (UID: \"8d8084cb-4410-402a-a217-18dc2de50fd7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2bms7" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.875724 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2bms7" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.884759 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrk5d\" (UniqueName: \"kubernetes.io/projected/18864978-0492-4497-913e-283bf542b579-kube-api-access-jrk5d\") pod \"openshift-config-operator-7777fb866f-xwt27\" (UID: \"18864978-0492-4497-913e-283bf542b579\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xwt27" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.928504 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.937484 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:02 crc kubenswrapper[4946]: E1204 15:05:02.937935 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.437915156 +0000 UTC m=+154.323958797 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.937991 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdnqr\" (UniqueName: \"kubernetes.io/projected/488929dd-9d70-4b9f-b41b-40be79becc36-kube-api-access-gdnqr\") pod \"router-default-5444994796-zzdp9\" (UID: \"488929dd-9d70-4b9f-b41b-40be79becc36\") " pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:02 crc kubenswrapper[4946]: I1204 15:05:02.998636 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-w56q9" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.001779 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kcnh\" (UniqueName: \"kubernetes.io/projected/447805f5-6492-4c42-95a5-ebfd9af1cf87-kube-api-access-8kcnh\") pod \"dns-default-v66dj\" (UID: \"447805f5-6492-4c42-95a5-ebfd9af1cf87\") " pod="openshift-dns/dns-default-v66dj" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.003456 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cjfn2" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.048126 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:03 crc kubenswrapper[4946]: E1204 15:05:03.049577 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.549561432 +0000 UTC m=+154.435605073 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.132299 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf"] Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.132459 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.140613 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vsskd" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.149775 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.150023 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65dc1ade-ddd4-4a22-99bd-780112f318f9-config\") pod \"machine-api-operator-5694c8668f-7bzmc\" (UID: \"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.150059 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.150081 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f02848c-6b07-4c72-8753-c34c4a3f210f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jfnsb\" (UID: \"4f02848c-6b07-4c72-8753-c34c4a3f210f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.150099 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f02848c-6b07-4c72-8753-c34c4a3f210f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jfnsb\" (UID: \"4f02848c-6b07-4c72-8753-c34c4a3f210f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.150169 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4ed440a8-a3c7-48da-9811-bcc77750303a-auth-proxy-config\") pod \"machine-approver-56656f9798-47szc\" (UID: \"4ed440a8-a3c7-48da-9811-bcc77750303a\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.150190 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/65dc1ade-ddd4-4a22-99bd-780112f318f9-images\") pod \"machine-api-operator-5694c8668f-7bzmc\" (UID: \"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.150228 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-config\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.150300 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-serving-cert\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:05:03 crc kubenswrapper[4946]: E1204 15:05:03.151243 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.65121546 +0000 UTC m=+154.537259101 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.152025 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65dc1ade-ddd4-4a22-99bd-780112f318f9-config\") pod \"machine-api-operator-5694c8668f-7bzmc\" (UID: \"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.153517 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4ed440a8-a3c7-48da-9811-bcc77750303a-auth-proxy-config\") pod \"machine-approver-56656f9798-47szc\" (UID: \"4ed440a8-a3c7-48da-9811-bcc77750303a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.154172 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-config\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.154431 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" 
(UniqueName: \"kubernetes.io/configmap/65dc1ade-ddd4-4a22-99bd-780112f318f9-images\") pod \"machine-api-operator-5694c8668f-7bzmc\" (UID: \"65dc1ade-ddd4-4a22-99bd-780112f318f9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.154841 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-cp7w9\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.154880 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf2f88d9-df9c-4d4a-a1a1-6ab46612186e-serving-cert\") pod \"authentication-operator-69f744f599-5fshb\" (UID: \"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.158088 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xwt27" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.178142 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d"] Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.200937 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-v66dj" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.251722 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:03 crc kubenswrapper[4946]: E1204 15:05:03.252299 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.752277408 +0000 UTC m=+154.638321049 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.289246 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.314464 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg"] Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.341984 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.352945 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:03 crc kubenswrapper[4946]: E1204 15:05:03.353203 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.853168732 +0000 UTC m=+154.739212383 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.353325 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:03 crc kubenswrapper[4946]: E1204 15:05:03.353717 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.853705319 +0000 UTC m=+154.739748970 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.371848 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.395708 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.454636 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:03 crc kubenswrapper[4946]: E1204 15:05:03.454895 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.954864571 +0000 UTC m=+154.840908212 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.455005 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:03 crc kubenswrapper[4946]: E1204 15:05:03.455387 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:03.955379437 +0000 UTC m=+154.841423208 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.458342 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b" event={"ID":"c3d07ea4-a001-42fe-9405-7f9f95f5523f","Type":"ContainerStarted","Data":"c455d11f4732c0e5f2a155832cb7ecc43475486748e08bbd8879ceafcd85283f"} Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.458384 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-dkjqm" event={"ID":"2eebdd40-7f32-4d80-9a29-28373b288710","Type":"ContainerStarted","Data":"ef977d38cb62b0acfb5ff1cd994f02f8110761a7c55a21285ee8f0216f312ddc"} Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.458839 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" event={"ID":"0b12867e-de02-4b45-ac09-5140aab7451e","Type":"ContainerStarted","Data":"54e9595356168641523c2224b47f7c5a365f36d1a7df4038eef9589779fdd0cf"} Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.460064 4946 patch_prober.go:28] interesting pod/console-operator-58897d9998-w8nz2 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.460134 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-w8nz2" podUID="0cd92273-2253-49c3-9e31-0da3e687c206" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.512058 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" podStartSLOduration=132.512036842 podStartE2EDuration="2m12.512036842s" podCreationTimestamp="2025-12-04 15:02:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:03.458773955 +0000 UTC m=+154.344817606" watchObservedRunningTime="2025-12-04 15:05:03.512036842 +0000 UTC m=+154.398080483" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.556390 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:03 crc kubenswrapper[4946]: E1204 15:05:03.556626 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-04 15:05:04.056590291 +0000 UTC m=+154.942633942 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.556893 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:03 crc kubenswrapper[4946]: E1204 15:05:03.557294 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:04.057280293 +0000 UTC m=+154.943323924 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.622314 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttf6v\" (UniqueName: \"kubernetes.io/projected/55893ee8-f0d7-4019-9522-45e8db696972-kube-api-access-ttf6v\") pod \"etcd-operator-b45778765-lg2p8\" (UID: \"55893ee8-f0d7-4019-9522-45e8db696972\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.624224 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f02848c-6b07-4c72-8753-c34c4a3f210f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jfnsb\" (UID: \"4f02848c-6b07-4c72-8753-c34c4a3f210f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.634296 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f02848c-6b07-4c72-8753-c34c4a3f210f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jfnsb\" (UID: \"4f02848c-6b07-4c72-8753-c34c4a3f210f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.658317 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:03 crc kubenswrapper[4946]: E1204 15:05:03.658551 4946 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:04.158513507 +0000 UTC m=+155.044557148 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.658594 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.659040 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:03 crc kubenswrapper[4946]: E1204 15:05:03.659435 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:04.159427536 +0000 UTC m=+155.045471177 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:03 crc kubenswrapper[4946]: W1204 15:05:03.685619 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod44d826fd_fefe_496b_9ee3_b6ea83d8227a.slice/crio-65113a601b25498e41cf261a1963e25ba0e3eca89ea45d2614811c71ab06a951 WatchSource:0}: Error finding container 65113a601b25498e41cf261a1963e25ba0e3eca89ea45d2614811c71ab06a951: Status 404 returned error can't find the container with id 65113a601b25498e41cf261a1963e25ba0e3eca89ea45d2614811c71ab06a951 Dec 04 15:05:03 crc kubenswrapper[4946]: W1204 15:05:03.696104 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaca3dc1d_be1a_49f6_86e8_0addede99412.slice/crio-1778a03c26220a32b043a7993b8969c65e9b6ec31498e6e45b285a29a5c60892 WatchSource:0}: Error finding container 1778a03c26220a32b043a7993b8969c65e9b6ec31498e6e45b285a29a5c60892: Status 404 returned error can't find the container with id 1778a03c26220a32b043a7993b8969c65e9b6ec31498e6e45b285a29a5c60892 Dec 04 15:05:03 crc kubenswrapper[4946]: W1204 15:05:03.698201 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbbd993f8_1caa_4fc1_9d39_a9524fe1e48b.slice/crio-35408d776dba11b252ef736e8ffc47a827938a96b88b17b608e608bdd306167d WatchSource:0}: Error finding container 35408d776dba11b252ef736e8ffc47a827938a96b88b17b608e608bdd306167d: Status 404 returned error can't find the container with id 35408d776dba11b252ef736e8ffc47a827938a96b88b17b608e608bdd306167d Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.760601 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:03 crc kubenswrapper[4946]: E1204 15:05:03.761150 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:04.261130265 +0000 UTC m=+155.147173906 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.786235 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.827515 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-w8nz2" podStartSLOduration=133.827485938 podStartE2EDuration="2m13.827485938s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:03.817056416 +0000 UTC m=+154.703100057" watchObservedRunningTime="2025-12-04 15:05:03.827485938 +0000 UTC m=+154.713529579" Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.863929 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:03 crc kubenswrapper[4946]: E1204 15:05:03.864279 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:04.36426563 +0000 UTC m=+155.250309271 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.964773 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:03 crc kubenswrapper[4946]: E1204 15:05:03.965199 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:04.465176244 +0000 UTC m=+155.351219895 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.965500 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:03 crc kubenswrapper[4946]: E1204 15:05:03.965897 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:04.465885756 +0000 UTC m=+155.351929397 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:03 crc kubenswrapper[4946]: I1204 15:05:03.989088 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d"] Dec 04 15:05:04 crc kubenswrapper[4946]: I1204 15:05:04.091849 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:04 crc kubenswrapper[4946]: E1204 15:05:04.093179 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:04.593073807 +0000 UTC m=+155.479117458 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:04 crc kubenswrapper[4946]: I1204 15:05:04.193287 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:04 crc kubenswrapper[4946]: E1204 15:05:04.193696 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:04.693679102 +0000 UTC m=+155.579722743 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:04 crc kubenswrapper[4946]: I1204 15:05:04.294365 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:04 crc kubenswrapper[4946]: E1204 15:05:04.294666 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:04.794650557 +0000 UTC m=+155.680694198 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:04 crc kubenswrapper[4946]: I1204 15:05:04.346762 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" podStartSLOduration=134.346739496 podStartE2EDuration="2m14.346739496s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:04.343044809 +0000 UTC m=+155.229088450" watchObservedRunningTime="2025-12-04 15:05:04.346739496 +0000 UTC m=+155.232783137"
Dec 04 15:05:04 crc kubenswrapper[4946]: I1204 15:05:04.395771 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv"
Dec 04 15:05:04 crc kubenswrapper[4946]: E1204 15:05:04.396231 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:04.896210352 +0000 UTC m=+155.782253993 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:04 crc kubenswrapper[4946]: I1204 15:05:04.498417 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 15:05:04 crc kubenswrapper[4946]: E1204 15:05:04.498659 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:04.998643254 +0000 UTC m=+155.884686895 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:04 crc kubenswrapper[4946]: I1204 15:05:04.498877 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv"
Dec 04 15:05:04 crc kubenswrapper[4946]: E1204 15:05:04.499373 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:04.999350947 +0000 UTC m=+155.885394588 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:04 crc kubenswrapper[4946]: I1204 15:05:04.534320 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rvvcq"]
Dec 04 15:05:04 crc kubenswrapper[4946]: I1204 15:05:04.556230 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-2bxm7"]
Dec 04 15:05:04 crc kubenswrapper[4946]: I1204 15:05:04.600455 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 15:05:04 crc kubenswrapper[4946]: E1204 15:05:04.600882 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:05.10086653 +0000 UTC m=+155.986910171 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:04 crc kubenswrapper[4946]: I1204 15:05:04.702490 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv"
Dec 04 15:05:04 crc kubenswrapper[4946]: E1204 15:05:04.702838 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:05.202817027 +0000 UTC m=+156.088860668 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:04 crc kubenswrapper[4946]: I1204 15:05:04.704267 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-fs4wh"]
Dec 04 15:05:04 crc kubenswrapper[4946]: I1204 15:05:04.803577 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 15:05:04 crc kubenswrapper[4946]: E1204 15:05:04.803913 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:05.303865656 +0000 UTC m=+156.189909297 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:04 crc kubenswrapper[4946]: I1204 15:05:04.855528 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmpsb"]
Dec 04 15:05:04 crc kubenswrapper[4946]: I1204 15:05:04.888781 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-69nvv" event={"ID":"9ed0f3fb-346c-4409-8d05-6286b8151dd2","Type":"ContainerStarted","Data":"bf1a53ac936623be3bfd191ebf05773edb6e32d0ac0db88cbc2a9a4bf8f14c91"}
Dec 04 15:05:04 crc kubenswrapper[4946]: I1204 15:05:04.915839 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv"
Dec 04 15:05:04 crc kubenswrapper[4946]: E1204 15:05:04.916263 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:05.416242855 +0000 UTC m=+156.302286496 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:04 crc kubenswrapper[4946]: I1204 15:05:04.940716 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-wm6jt"]
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.016690 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 15:05:05 crc kubenswrapper[4946]: E1204 15:05:05.016960 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:05.51691702 +0000 UTC m=+156.402960661 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.017150 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv"
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.017436 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-v4qw8" event={"ID":"12dc3c7b-da6c-46a0-b0c9-d0899e46837a","Type":"ContainerStarted","Data":"53d69ae3b42ec9359ec8343288ee6efa00e662745bd470dd92f27afbdeba1742"}
Dec 04 15:05:05 crc kubenswrapper[4946]: E1204 15:05:05.017692 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:05.517680145 +0000 UTC m=+156.403723786 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.023393 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jw77k"]
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.025285 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-2bms7"]
Dec 04 15:05:05 crc kubenswrapper[4946]: W1204 15:05:05.026857 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf57c392_1dbe_4c73_96ea_98146b808571.slice/crio-d71aee2b91eda1ad294f0d7012758df01342061d9cccf093b0ccf0b7b2ec11db WatchSource:0}: Error finding container d71aee2b91eda1ad294f0d7012758df01342061d9cccf093b0ccf0b7b2ec11db: Status 404 returned error can't find the container with id d71aee2b91eda1ad294f0d7012758df01342061d9cccf093b0ccf0b7b2ec11db
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.027727 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" event={"ID":"44d826fd-fefe-496b-9ee3-b6ea83d8227a","Type":"ContainerStarted","Data":"65113a601b25498e41cf261a1963e25ba0e3eca89ea45d2614811c71ab06a951"}
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.051499 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-zzdp9" event={"ID":"488929dd-9d70-4b9f-b41b-40be79becc36","Type":"ContainerStarted","Data":"805796887f42164f43b64a117b80bab4e4b9cc30b683a54be3ee064d383d77fa"}
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.057902 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nbwkf"]
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.062071 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d" event={"ID":"aca3dc1d-be1a-49f6-86e8-0addede99412","Type":"ContainerStarted","Data":"1778a03c26220a32b043a7993b8969c65e9b6ec31498e6e45b285a29a5c60892"}
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.066846 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8" event={"ID":"fb5b0fbe-569f-4edb-a5da-c1d37eec5981","Type":"ContainerStarted","Data":"9e0ef650f9352a91af46bea2bcdf7dec4f5c62b9432fef53539c8042203fd9ad"}
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.081206 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9qzcp"]
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.087842 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-w56q9"]
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.090183 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-dkjqm" event={"ID":"2eebdd40-7f32-4d80-9a29-28373b288710","Type":"ContainerStarted","Data":"e261383176bbc7496c8f3120b05f02a8e68283be5a7f2acb280384bf99ee1ade"}
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.116042 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-5fshb"]
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.121715 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 15:05:05 crc kubenswrapper[4946]: E1204 15:05:05.122486 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:05.622458272 +0000 UTC m=+156.508501913 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.127643 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-q85sw"]
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.133689 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-dkjqm" podStartSLOduration=134.133656248 podStartE2EDuration="2m14.133656248s" podCreationTimestamp="2025-12-04 15:02:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:05.121299695 +0000 UTC m=+156.007343336" watchObservedRunningTime="2025-12-04 15:05:05.133656248 +0000 UTC m=+156.019699889"
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.145684 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-7tl7m" event={"ID":"8b20a593-dece-40b0-ae3f-12a9fabbf3e1","Type":"ContainerStarted","Data":"486d1abb437d70e91e60e7fa200531dc5ef4cc5d8980c67a9d29441dc98a5a7d"}
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.173370 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" event={"ID":"42d67fa8-e84e-4a09-a51d-c63365c274c5","Type":"ContainerStarted","Data":"66942f5f20e15f0c8a39b2828237e4c5c47d5cb99925f0a69829445b7584f8cd"}
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.181509 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b" event={"ID":"c3d07ea4-a001-42fe-9405-7f9f95f5523f","Type":"ContainerStarted","Data":"088c4db0adf59a0a7fa2e24b6e0ce5a34478401bcef7f3668364e08e27ce5a49"}
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.181789 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b"
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.201810 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-qdrtz" event={"ID":"70d4f1c0-1b54-4178-a025-d95419215a08","Type":"ContainerStarted","Data":"49d20cffd02316d61b7a49bb49caf54bef8fef6a961402f5c679bb9b0abcda51"}
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.204241 4946 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-s796b container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body=
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.204319 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b" podUID="c3d07ea4-a001-42fe-9405-7f9f95f5523f" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused"
Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.212211 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb" event={"ID":"9c28e21c-79cb-4fe0-b8f3-247fbce0640c","Type":"ContainerStarted","Data":"4656d0bedaa51169d870af586db6eaea2abeb8f959abfaf3444ed036ea78c29c"} Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.218015 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-6w5k9" event={"ID":"3539ee4e-f397-45e0-b449-93b150766448","Type":"ContainerStarted","Data":"5d7ae9d03cd81026151b88996ba7cd1d03ca8a357862a8a076795452a5f1df76"} Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.220424 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b" podStartSLOduration=135.220408361 podStartE2EDuration="2m15.220408361s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:05.218935374 +0000 UTC m=+156.104979015" watchObservedRunningTime="2025-12-04 15:05:05.220408361 +0000 UTC m=+156.106452002" Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.223378 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:05 crc kubenswrapper[4946]: E1204 15:05:05.225070 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:05.725051479 +0000 UTC m=+156.611095120 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.227276 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d" event={"ID":"21f370a9-11c1-4b16-9610-ced611035357","Type":"ContainerStarted","Data":"61fe1041b8496ad840e03155f19a5fbf59e01e130d4c2a52098ba33577d78b29"} Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.232787 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg" event={"ID":"bbd993f8-1caa-4fc1-9d39-a9524fe1e48b","Type":"ContainerStarted","Data":"35408d776dba11b252ef736e8ffc47a827938a96b88b17b608e608bdd306167d"} Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.232973 4946 patch_prober.go:28] interesting pod/console-operator-58897d9998-w8nz2 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.233332 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-w8nz2" podUID="0cd92273-2253-49c3-9e31-0da3e687c206" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.248477 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb" podStartSLOduration=135.248452875 podStartE2EDuration="2m15.248452875s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:05.243984432 +0000 UTC m=+156.130028073" watchObservedRunningTime="2025-12-04 15:05:05.248452875 +0000 UTC m=+156.134496506" Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.324531 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:05 crc kubenswrapper[4946]: E1204 15:05:05.324738 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:05.824722724 +0000 UTC m=+156.710766365 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.325426 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:05 crc kubenswrapper[4946]: E1204 15:05:05.325721 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:05.825714135 +0000 UTC m=+156.711757776 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.426775 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:05 crc kubenswrapper[4946]: E1204 15:05:05.427037 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:05.927007542 +0000 UTC m=+156.813051183 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.427158 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:05 crc kubenswrapper[4946]: E1204 15:05:05.427575 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:05.927558909 +0000 UTC m=+156.813602550 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.488877 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cjfn2"] Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.528450 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:05 crc kubenswrapper[4946]: E1204 15:05:05.529068 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:06.029046801 +0000 UTC m=+156.915090442 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.630461 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:05 crc kubenswrapper[4946]: E1204 15:05:05.630814 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:06.130800372 +0000 UTC m=+157.016844013 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.731649 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:05 crc kubenswrapper[4946]: E1204 15:05:05.732043 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:06.231994775 +0000 UTC m=+157.118038416 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.732361 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:05 crc kubenswrapper[4946]: E1204 15:05:05.732696 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:06.232681787 +0000 UTC m=+157.118725428 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.833914 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:05 crc kubenswrapper[4946]: E1204 15:05:05.834478 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:06.334437578 +0000 UTC m=+157.220481219 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.850835 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-xwt27"] Dec 04 15:05:05 crc kubenswrapper[4946]: W1204 15:05:05.850830 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8d8084cb_4410_402a_a217_18dc2de50fd7.slice/crio-18e0b99c8ebca268a0c693e05b76bb8943eaa622475d5e3ac4347b5e189b42c6 WatchSource:0}: Error finding container 18e0b99c8ebca268a0c693e05b76bb8943eaa622475d5e3ac4347b5e189b42c6: Status 404 returned error can't find the container with id 18e0b99c8ebca268a0c693e05b76bb8943eaa622475d5e3ac4347b5e189b42c6 Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.859918 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cp7w9"] Dec 04 15:05:05 crc kubenswrapper[4946]: W1204 15:05:05.871005 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf2f88d9_df9c_4d4a_a1a1_6ab46612186e.slice/crio-44aa13b8df5c44e67ee2a05ae10eb2c0087db97711759be227e216f169814eb6 WatchSource:0}: Error finding container 44aa13b8df5c44e67ee2a05ae10eb2c0087db97711759be227e216f169814eb6: Status 404 returned error can't find the container with id 44aa13b8df5c44e67ee2a05ae10eb2c0087db97711759be227e216f169814eb6 Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.877814 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb"] Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.881399 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-7bzmc"] Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.883842 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vsskd"] Dec 04 15:05:05 crc kubenswrapper[4946]: W1204 15:05:05.906936 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5b24b049_2010_4a5d_813a_55a70bc07eaf.slice/crio-8827e875c9601422226908a53e29caacb097e76fc637898ee7b77c6dd859addc WatchSource:0}: Error finding container 8827e875c9601422226908a53e29caacb097e76fc637898ee7b77c6dd859addc: Status 404 returned error can't find the container with id 8827e875c9601422226908a53e29caacb097e76fc637898ee7b77c6dd859addc Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.911435 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-lg2p8"] Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.935724 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:05 crc kubenswrapper[4946]: E1204 15:05:05.936142 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:06.436110326 +0000 UTC m=+157.322153967 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:05 crc kubenswrapper[4946]: I1204 15:05:05.940686 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-v66dj"] Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.037001 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:06 crc kubenswrapper[4946]: E1204 15:05:06.037361 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:06.537309629 +0000 UTC m=+157.423353270 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.141279 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:06 crc kubenswrapper[4946]: E1204 15:05:06.141713 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:06.641696994 +0000 UTC m=+157.527740635 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.237551 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" event={"ID":"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e","Type":"ContainerStarted","Data":"44aa13b8df5c44e67ee2a05ae10eb2c0087db97711759be227e216f169814eb6"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.238604 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jw77k" event={"ID":"d0d0d8ae-0f47-4929-855b-60798f0d6bd3","Type":"ContainerStarted","Data":"4d37c9cca02026aa5c9aa1145541c72824af7728063d009d70ff3120f249c8dd"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.240347 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-v66dj" event={"ID":"447805f5-6492-4c42-95a5-ebfd9af1cf87","Type":"ContainerStarted","Data":"268aeb223a9fbc321a137da2976e15803acdf0eb0aa914bc9e9615e0af73bc42"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.242540 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:06 crc kubenswrapper[4946]: E1204 15:05:06.243351 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:06.74331433 +0000 UTC m=+157.629357971 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.243956 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vsskd" event={"ID":"ca64a188-72ed-4efc-820f-507e4f6e1d35","Type":"ContainerStarted","Data":"5275c339732199d0d84e34dfae584fe5acb7664a4d7f26cf6f50fbdbbbc6efbe"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.245890 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2bms7" event={"ID":"8d8084cb-4410-402a-a217-18dc2de50fd7","Type":"ContainerStarted","Data":"18e0b99c8ebca268a0c693e05b76bb8943eaa622475d5e3ac4347b5e189b42c6"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.247993 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rvvcq" event={"ID":"be182b12-eeb7-4695-b7e4-247044da76cf","Type":"ContainerStarted","Data":"2bf30e73a4b416c9f77a2a55ba10dcc2f36a9e5fa5faaa7c08eb9f642ba7d9fe"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.249339 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc" event={"ID":"4ed440a8-a3c7-48da-9811-bcc77750303a","Type":"ContainerStarted","Data":"16bc3cc682dec02984aa4de0f94cc36692363a987e39e6c9a5e055ef879224d1"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.250451 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmpsb" event={"ID":"1984eac2-4bb6-4512-b134-d8bf2588db46","Type":"ContainerStarted","Data":"e9dc99f2d0dcfdf9436f064e3e08b8ca3bcef09d2bd28c9bd21b5cf55fa1050e"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.251367 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9qzcp" event={"ID":"d1001856-7024-4079-b253-c66661c9e6ef","Type":"ContainerStarted","Data":"b70ad1d26e38c4d4bde92069c90c5efe4b0965003eb166480dc098c849a31e17"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.252127 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" event={"ID":"02de3a18-59d7-48c0-bf9c-d40c09ed8cee","Type":"ContainerStarted","Data":"ad10e3b4559f19e1bfcb249147e585d52fc2c77595bc1b8161d22b7f6334609e"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.253699 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" event={"ID":"55893ee8-f0d7-4019-9522-45e8db696972","Type":"ContainerStarted","Data":"8997860aacf851d4a96f13cce92d42aab948f879bc808af91e3adb6a05839fd4"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.255972 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-w56q9" event={"ID":"53433a4f-ccda-4c5c-9dca-7389ec6d741c","Type":"ContainerStarted","Data":"de1bff8fe9860f8de269acf6f35ca45305abe6718cd32c651dc2cccde67dbd72"} Dec 04 15:05:06 crc 
kubenswrapper[4946]: I1204 15:05:06.258947 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cjfn2" event={"ID":"5b24b049-2010-4a5d-813a-55a70bc07eaf","Type":"ContainerStarted","Data":"8827e875c9601422226908a53e29caacb097e76fc637898ee7b77c6dd859addc"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.260570 4946 generic.go:334] "Generic (PLEG): container finished" podID="42d67fa8-e84e-4a09-a51d-c63365c274c5" containerID="66942f5f20e15f0c8a39b2828237e4c5c47d5cb99925f0a69829445b7584f8cd" exitCode=0 Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.260639 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" event={"ID":"42d67fa8-e84e-4a09-a51d-c63365c274c5","Type":"ContainerDied","Data":"66942f5f20e15f0c8a39b2828237e4c5c47d5cb99925f0a69829445b7584f8cd"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.262208 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" event={"ID":"d0437519-c01e-4b89-a007-8fda5902ea9f","Type":"ContainerStarted","Data":"892e207a0c3100120192f196c9b8cce3da2af4625fc3c2069d926430cd7877aa"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.263678 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" event={"ID":"0b12867e-de02-4b45-ac09-5140aab7451e","Type":"ContainerStarted","Data":"dd5749d0eba0fadc137ed3019b517bc7f496580ddde20187785b7ceef42904cf"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.264620 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nbwkf" event={"ID":"650a1e58-9737-4c8a-b9aa-5529ca970fa6","Type":"ContainerStarted","Data":"c970b38d91b6231aef06c4306574d42e4113d7870197702cba46a83daa49d02b"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.265566 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2bxm7" event={"ID":"7f0be614-bbcc-46e2-b1ee-2944e087d3f4","Type":"ContainerStarted","Data":"2944656ff82ebe5eeb4cf74047e611a7fa949ac2198564b80db157056695055a"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.266299 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-fs4wh" event={"ID":"bf57c392-1dbe-4c73-96ea-98146b808571","Type":"ContainerStarted","Data":"d71aee2b91eda1ad294f0d7012758df01342061d9cccf093b0ccf0b7b2ec11db"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.267151 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" event={"ID":"65dc1ade-ddd4-4a22-99bd-780112f318f9","Type":"ContainerStarted","Data":"6382a2c8f30b2387d361f45683f3fdbd34361ddbddeed7a0a0e997bd7f998a0a"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.267997 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-q85sw" event={"ID":"90dd2744-a408-4164-bd61-88f44a4dc1ef","Type":"ContainerStarted","Data":"d94cbe6fd8a32c16d8e890113f6d084ff9dabb6343302ad3e672f0e773e9244d"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.268871 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xwt27" 
event={"ID":"18864978-0492-4497-913e-283bf542b579","Type":"ContainerStarted","Data":"c5bdaad7e35bb8c79f5eb7e1de0da27115013df0618b8294a846d634b2dea3f5"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.269938 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb" event={"ID":"4f02848c-6b07-4c72-8753-c34c4a3f210f","Type":"ContainerStarted","Data":"037aac42f0595e86c5f18f44a9b99768d6a4d632fe17f03168b3ab1c331cff7a"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.287011 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" event={"ID":"3c6657f5-af17-443b-882d-3e345029eac5","Type":"ContainerStarted","Data":"13acd963813d787b4ce5526bb0cc0d6240572b5b67da36aa542e4c3554d88570"} Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.287787 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8" Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.290435 4946 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-s796b container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body= Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.290514 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b" podUID="c3d07ea4-a001-42fe-9405-7f9f95f5523f" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.294799 4946 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-bdwn8 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.17:8443/healthz\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.294862 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8" podUID="fb5b0fbe-569f-4edb-a5da-c1d37eec5981" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.17:8443/healthz\": dial tcp 10.217.0.17:8443: connect: connection refused" Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.313999 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8" podStartSLOduration=136.313974851 podStartE2EDuration="2m16.313974851s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:06.31299244 +0000 UTC m=+157.199036081" watchObservedRunningTime="2025-12-04 15:05:06.313974851 +0000 UTC m=+157.200018492" Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.335266 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-69nvv" podStartSLOduration=136.335245058 podStartE2EDuration="2m16.335245058s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:06.334705451 +0000 UTC m=+157.220749092" watchObservedRunningTime="2025-12-04 15:05:06.335245058 +0000 UTC m=+157.221288699" Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.345970 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:06 crc kubenswrapper[4946]: E1204 15:05:06.346640 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:06.846626721 +0000 UTC m=+157.732670362 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.361457 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-v4qw8" podStartSLOduration=136.361430432 podStartE2EDuration="2m16.361430432s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:06.357733325 +0000 UTC m=+157.243776956" watchObservedRunningTime="2025-12-04 15:05:06.361430432 +0000 UTC m=+157.247474073" Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.447132 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:06 crc kubenswrapper[4946]: E1204 15:05:06.452042 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:06.95177196 +0000 UTC m=+157.837815601 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.548727 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv"
Dec 04 15:05:06 crc kubenswrapper[4946]: E1204 15:05:06.549099 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:07.049083789 +0000 UTC m=+157.935127430 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:06 crc kubenswrapper[4946]: I1204 15:05:06.650259 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 15:05:06 crc kubenswrapper[4946]: E1204 15:05:06.650627 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:07.150600052 +0000 UTC m=+158.036643693 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:07 crc kubenswrapper[4946]: I1204 15:05:07.159013 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 15:05:07 crc kubenswrapper[4946]: E1204 15:05:07.159536 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:07.659503161 +0000 UTC m=+158.545546852 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
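[editor's note] Every failed attempt above stamps the operation with "No retries permitted until ... (durationBeforeRetry 500ms)", and the reconciler re-enters roughly every 100ms, so most log lines are the backoff window rejecting a retry rather than a fresh CSI call. A minimal Go sketch of that per-operation bookkeeping, with illustrative names only (this is not kubelet's actual nestedpendingoperations code):

package main

import (
	"fmt"
	"time"
)

// pendingOp remembers the earliest instant a failed operation may run again,
// mirroring the "No retries permitted until ..." messages in the log.
type pendingOp struct {
	retryAfter time.Time
}

const durationBeforeRetry = 500 * time.Millisecond // constant window, as logged

func (p *pendingOp) run(op func() error) error {
	now := time.Now()
	if now.Before(p.retryAfter) {
		return fmt.Errorf("no retries permitted until %s (durationBeforeRetry %s)",
			p.retryAfter.Format(time.RFC3339Nano), durationBeforeRetry)
	}
	if err := op(); err != nil {
		p.retryAfter = now.Add(durationBeforeRetry) // schedule next permitted attempt
		return err
	}
	p.retryAfter = time.Time{} // success clears the backoff window
	return nil
}

func main() {
	p := &pendingOp{}
	mountDevice := func() error {
		return fmt.Errorf("driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers")
	}
	// Re-enter every ~100ms like the reconciler loop; only every fifth pass
	// actually reaches the (still failing) operation.
	for i := 0; i < 8; i++ {
		if err := p.run(mountDevice); err != nil {
			fmt.Println(err)
		}
		time.Sleep(100 * time.Millisecond)
	}
}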
Dec 04 15:05:07 crc kubenswrapper[4946]: I1204 15:05:07.261038 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv"
Dec 04 15:05:07 crc kubenswrapper[4946]: E1204 15:05:07.261462 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:07.761440307 +0000 UTC m=+158.647483948 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:07 crc kubenswrapper[4946]: I1204 15:05:07.321721 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-7tl7m" event={"ID":"8b20a593-dece-40b0-ae3f-12a9fabbf3e1","Type":"ContainerStarted","Data":"0acdc638d520ae630a51a954dac848defa1da4da6bd02d3ee074abf3577fe257"}
Dec 04 15:05:07 crc kubenswrapper[4946]: I1204 15:05:07.323852 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg" event={"ID":"bbd993f8-1caa-4fc1-9d39-a9524fe1e48b","Type":"ContainerStarted","Data":"2583b6a27965298080aad56a3dd2ac6d4afa26f968d38b7a42b91446a9c3f4e4"}
Dec 04 15:05:07 crc kubenswrapper[4946]: I1204 15:05:07.327449 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-zzdp9" event={"ID":"488929dd-9d70-4b9f-b41b-40be79becc36","Type":"ContainerStarted","Data":"6f955a0b116c9109f77cdfb4b80b2b3b8d510eac915e503408a9c3307e63281c"}
Dec 04 15:05:07 crc kubenswrapper[4946]: I1204 15:05:07.330178 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" event={"ID":"44d826fd-fefe-496b-9ee3-b6ea83d8227a","Type":"ContainerStarted","Data":"ec37fc9b442139e528e762058c4dc84a641169daae1f622fbf27a441b4cf1681"}
Dec 04 15:05:07 crc kubenswrapper[4946]: I1204 15:05:07.332604 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-qdrtz" event={"ID":"70d4f1c0-1b54-4178-a025-d95419215a08","Type":"ContainerStarted","Data":"a1a7ae7c30330d454431332743ad288bc0018cff56b1a967f59e9e6975a251ed"}
Dec 04 15:05:07 crc kubenswrapper[4946]: I1204 15:05:07.335408 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d" event={"ID":"aca3dc1d-be1a-49f6-86e8-0addede99412","Type":"ContainerStarted","Data":"51a72c765e06ad8d6b3f1fafbe94c812806ce5f28eaf002e794b2073e6a1a924"}
Dec 04 15:05:07 crc kubenswrapper[4946]: I1204 15:05:07.336427 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l"
Dec 04 15:05:07 crc kubenswrapper[4946]: I1204 15:05:07.341465 4946 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-z4t8l container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" start-of-body=
Dec 04 15:05:07 crc kubenswrapper[4946]: I1204 15:05:07.341532 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" podUID="0b12867e-de02-4b45-ac09-5140aab7451e" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused"
Dec 04 15:05:07 crc kubenswrapper[4946]: I1204 15:05:07.343271 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bdwn8"
Dec 04 15:05:07 crc kubenswrapper[4946]: E1204 15:05:07.363253 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:07.863232559 +0000 UTC m=+158.749276200 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:07 crc kubenswrapper[4946]: I1204 15:05:07.365073 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 15:05:07 crc kubenswrapper[4946]: I1204 15:05:07.366383 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" podStartSLOduration=137.366367869 podStartE2EDuration="2m17.366367869s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:07.36358175 +0000 UTC m=+158.249625391" watchObservedRunningTime="2025-12-04 15:05:07.366367869 +0000 UTC m=+158.252411510"
Dec 04 15:05:07 crc kubenswrapper[4946]: I1204 15:05:07.367047 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv"
Dec 04 15:05:07 crc kubenswrapper[4946]: E1204 15:05:07.367894 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:07.867877607 +0000 UTC m=+158.753921248 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:07 crc kubenswrapper[4946]: I1204 15:05:07.470092 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 15:05:07 crc kubenswrapper[4946]: E1204 15:05:07.472444 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:07.972422117 +0000 UTC m=+158.858465758 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:07 crc kubenswrapper[4946]: I1204 15:05:07.977627 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 15:05:07 crc kubenswrapper[4946]: E1204 15:05:07.978071 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:08.478049821 +0000 UTC m=+159.364093462 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.075583 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-df2mv"]
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.076790 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-df2mv"
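[editor's note] Both failure modes above reduce to the same root cause: kubelet cannot build a CSI client because kubevirt.io.hostpath-provisioner is not yet in its registered-driver list (drivers only appear after the node plugin registers over the plugin-registration socket). An illustrative Go stand-in for that lookup, not kubelet's actual registry type; the socket path in main is hypothetical:

package main

import (
	"fmt"
	"sync"
)

// csiDriverRegistry maps registered driver names to their endpoints.
type csiDriverRegistry struct {
	mu      sync.RWMutex
	drivers map[string]string
}

func (r *csiDriverRegistry) register(name, endpoint string) {
	r.mu.Lock()
	defer r.mu.Unlock()
	if r.drivers == nil {
		r.drivers = map[string]string{}
	}
	r.drivers[name] = endpoint
}

// clientFor fails exactly the way the mount/unmount retries above do when
// the provisioner's node plugin has not registered yet.
func (r *csiDriverRegistry) clientFor(name string) (string, error) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	endpoint, ok := r.drivers[name]
	if !ok {
		return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return endpoint, nil
}

func main() {
	reg := &csiDriverRegistry{}
	if _, err := reg.clientFor("kubevirt.io.hostpath-provisioner"); err != nil {
		fmt.Println(err) // matches the recurring error in the retries above
	}
	// Once the plugin registers, the same lookup succeeds and the retries stop.
	reg.register("kubevirt.io.hostpath-provisioner", "/var/lib/kubelet/plugins/example/csi.sock")
	endpoint, _ := reg.clientFor("kubevirt.io.hostpath-provisioner")
	fmt.Println("driver registered at", endpoint)
}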
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.079211 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv"
Dec 04 15:05:08 crc kubenswrapper[4946]: E1204 15:05:08.079740 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:08.579713139 +0000 UTC m=+159.465756780 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.080963 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.086226 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-df2mv"]
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.181076 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 15:05:08 crc kubenswrapper[4946]: E1204 15:05:08.181429 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:08.681400927 +0000 UTC m=+159.567444568 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.181853 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcdkj\" (UniqueName: \"kubernetes.io/projected/fec9d9bd-a20b-4625-9070-19949999c206-kube-api-access-rcdkj\") pod \"community-operators-df2mv\" (UID: \"fec9d9bd-a20b-4625-9070-19949999c206\") " pod="openshift-marketplace/community-operators-df2mv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.181919 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.181949 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fec9d9bd-a20b-4625-9070-19949999c206-catalog-content\") pod \"community-operators-df2mv\" (UID: \"fec9d9bd-a20b-4625-9070-19949999c206\") " pod="openshift-marketplace/community-operators-df2mv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.181978 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fec9d9bd-a20b-4625-9070-19949999c206-utilities\") pod \"community-operators-df2mv\" (UID: \"fec9d9bd-a20b-4625-9070-19949999c206\") " pod="openshift-marketplace/community-operators-df2mv"
Dec 04 15:05:08 crc kubenswrapper[4946]: E1204 15:05:08.182386 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:08.682368718 +0000 UTC m=+159.568412349 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.295240 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.295712 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fec9d9bd-a20b-4625-9070-19949999c206-catalog-content\") pod \"community-operators-df2mv\" (UID: \"fec9d9bd-a20b-4625-9070-19949999c206\") " pod="openshift-marketplace/community-operators-df2mv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.295763 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fec9d9bd-a20b-4625-9070-19949999c206-utilities\") pod \"community-operators-df2mv\" (UID: \"fec9d9bd-a20b-4625-9070-19949999c206\") " pod="openshift-marketplace/community-operators-df2mv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.295846 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcdkj\" (UniqueName: \"kubernetes.io/projected/fec9d9bd-a20b-4625-9070-19949999c206-kube-api-access-rcdkj\") pod \"community-operators-df2mv\" (UID: \"fec9d9bd-a20b-4625-9070-19949999c206\") " pod="openshift-marketplace/community-operators-df2mv"
Dec 04 15:05:08 crc kubenswrapper[4946]: E1204 15:05:08.296455 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:08.796433791 +0000 UTC m=+159.682477432 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.296940 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fec9d9bd-a20b-4625-9070-19949999c206-catalog-content\") pod \"community-operators-df2mv\" (UID: \"fec9d9bd-a20b-4625-9070-19949999c206\") " pod="openshift-marketplace/community-operators-df2mv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.297298 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fec9d9bd-a20b-4625-9070-19949999c206-utilities\") pod \"community-operators-df2mv\" (UID: \"fec9d9bd-a20b-4625-9070-19949999c206\") " pod="openshift-marketplace/community-operators-df2mv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.316967 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7mzks"]
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.319210 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7mzks"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.324778 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.350382 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcdkj\" (UniqueName: \"kubernetes.io/projected/fec9d9bd-a20b-4625-9070-19949999c206-kube-api-access-rcdkj\") pod \"community-operators-df2mv\" (UID: \"fec9d9bd-a20b-4625-9070-19949999c206\") " pod="openshift-marketplace/community-operators-df2mv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.394225 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7mzks"]
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.397349 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv"
Dec 04 15:05:08 crc kubenswrapper[4946]: E1204 15:05:08.397785 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:08.897767909 +0000 UTC m=+159.783811550 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.452634 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-df2mv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.457917 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2bms7" event={"ID":"8d8084cb-4410-402a-a217-18dc2de50fd7","Type":"ContainerStarted","Data":"bf0a8111827fc96b53d254a10ffd47e57b8f1007da6f87cda095c9438f92856f"}
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.474152 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fqqvb"]
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.475402 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fqqvb"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.492949 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmpsb" event={"ID":"1984eac2-4bb6-4512-b134-d8bf2588db46","Type":"ContainerStarted","Data":"943d13e2b8e78751b35172c9135d28daa45716677cfc2121fa3957457ab63cb8"}
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.493877 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2bms7" podStartSLOduration=137.493865969 podStartE2EDuration="2m17.493865969s" podCreationTimestamp="2025-12-04 15:02:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:08.492521716 +0000 UTC m=+159.378565357" watchObservedRunningTime="2025-12-04 15:05:08.493865969 +0000 UTC m=+159.379909600"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.501053 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.501332 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73b58e95-46d5-468b-9890-a4fc3c5a0bde-catalog-content\") pod \"certified-operators-7mzks\" (UID: \"73b58e95-46d5-468b-9890-a4fc3c5a0bde\") " pod="openshift-marketplace/certified-operators-7mzks"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.501375 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8gdn\" (UniqueName: \"kubernetes.io/projected/73b58e95-46d5-468b-9890-a4fc3c5a0bde-kube-api-access-m8gdn\") pod \"certified-operators-7mzks\" (UID: \"73b58e95-46d5-468b-9890-a4fc3c5a0bde\") " pod="openshift-marketplace/certified-operators-7mzks"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.501393 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73b58e95-46d5-468b-9890-a4fc3c5a0bde-utilities\") pod \"certified-operators-7mzks\" (UID: \"73b58e95-46d5-468b-9890-a4fc3c5a0bde\") " pod="openshift-marketplace/certified-operators-7mzks"
Dec 04 15:05:08 crc kubenswrapper[4946]: E1204 15:05:08.501492 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:09.001477892 +0000 UTC m=+159.887521533 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.536750 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fqqvb"]
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.571576 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-fs4wh" event={"ID":"bf57c392-1dbe-4c73-96ea-98146b808571","Type":"ContainerStarted","Data":"44e52418dedf608e3e24f748b202f42356a835e4537933f9ecaf11ad09568900"}
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.575581 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2bxm7" event={"ID":"7f0be614-bbcc-46e2-b1ee-2944e087d3f4","Type":"ContainerStarted","Data":"aa7eaf55d79d73bfc606b7e1e9b01bfd7df31a792c469d782ebd4e921d50f591"}
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.583695 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-w56q9" event={"ID":"53433a4f-ccda-4c5c-9dca-7389ec6d741c","Type":"ContainerStarted","Data":"54ff70e6da225563ec691307f622feb5cd40f5a667d564da99cd01a5fa38aa2a"}
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.600316 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9qzcp" event={"ID":"d1001856-7024-4079-b253-c66661c9e6ef","Type":"ContainerStarted","Data":"179bcabaf68a36407d8f2a8b0b75bd714a21916daca03a82946ed98aac8c6794"}
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.602638 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8gdn\" (UniqueName: \"kubernetes.io/projected/73b58e95-46d5-468b-9890-a4fc3c5a0bde-kube-api-access-m8gdn\") pod \"certified-operators-7mzks\" (UID: \"73b58e95-46d5-468b-9890-a4fc3c5a0bde\") " pod="openshift-marketplace/certified-operators-7mzks"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.602673 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73b58e95-46d5-468b-9890-a4fc3c5a0bde-utilities\") pod \"certified-operators-7mzks\" (UID: \"73b58e95-46d5-468b-9890-a4fc3c5a0bde\") " pod="openshift-marketplace/certified-operators-7mzks"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.603139 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.603208 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca7d43a3-9406-4810-9105-ede64b23375e-utilities\") pod \"community-operators-fqqvb\" (UID: \"ca7d43a3-9406-4810-9105-ede64b23375e\") " pod="openshift-marketplace/community-operators-fqqvb"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.603168 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73b58e95-46d5-468b-9890-a4fc3c5a0bde-utilities\") pod \"certified-operators-7mzks\" (UID: \"73b58e95-46d5-468b-9890-a4fc3c5a0bde\") " pod="openshift-marketplace/certified-operators-7mzks"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.603263 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfh42\" (UniqueName: \"kubernetes.io/projected/ca7d43a3-9406-4810-9105-ede64b23375e-kube-api-access-kfh42\") pod \"community-operators-fqqvb\" (UID: \"ca7d43a3-9406-4810-9105-ede64b23375e\") " pod="openshift-marketplace/community-operators-fqqvb"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.603460 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73b58e95-46d5-468b-9890-a4fc3c5a0bde-catalog-content\") pod \"certified-operators-7mzks\" (UID: \"73b58e95-46d5-468b-9890-a4fc3c5a0bde\") " pod="openshift-marketplace/certified-operators-7mzks"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.603574 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca7d43a3-9406-4810-9105-ede64b23375e-catalog-content\") pod \"community-operators-fqqvb\" (UID: \"ca7d43a3-9406-4810-9105-ede64b23375e\") " pod="openshift-marketplace/community-operators-fqqvb"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.604042 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73b58e95-46d5-468b-9890-a4fc3c5a0bde-catalog-content\") pod \"certified-operators-7mzks\" (UID: \"73b58e95-46d5-468b-9890-a4fc3c5a0bde\") " pod="openshift-marketplace/certified-operators-7mzks"
Dec 04 15:05:08 crc kubenswrapper[4946]: E1204 15:05:08.604444 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:09.104423569 +0000 UTC m=+159.990467210 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.608947 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rvvcq" event={"ID":"be182b12-eeb7-4695-b7e4-247044da76cf","Type":"ContainerStarted","Data":"d5c727d8e5cd891ca48d1834356299a5dc26e96fa2b9738c353330b05a37caaf"}
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.611930 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc" event={"ID":"4ed440a8-a3c7-48da-9811-bcc77750303a","Type":"ContainerStarted","Data":"bf95a80c9c2265b108c3d8e302222d394181cc2c4b787754a730675ee5ab2ae2"}
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.613428 4946 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-z4t8l container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" start-of-body=
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.613480 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" podUID="0b12867e-de02-4b45-ac09-5140aab7451e" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.613547 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.621368 4946 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-jvfnf container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:5443/healthz\": dial tcp 10.217.0.24:5443: connect: connection refused" start-of-body=
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.621422 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" podUID="44d826fd-fefe-496b-9ee3-b6ea83d8227a" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.24:5443/healthz\": dial tcp 10.217.0.24:5443: connect: connection refused"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.623756 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8gdn\" (UniqueName: \"kubernetes.io/projected/73b58e95-46d5-468b-9890-a4fc3c5a0bde-kube-api-access-m8gdn\") pod \"certified-operators-7mzks\" (UID: \"73b58e95-46d5-468b-9890-a4fc3c5a0bde\") " pod="openshift-marketplace/certified-operators-7mzks"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.627247 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rvvcq" podStartSLOduration=138.627232676 podStartE2EDuration="2m18.627232676s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:08.626776961 +0000 UTC m=+159.512820592" watchObservedRunningTime="2025-12-04 15:05:08.627232676 +0000 UTC m=+159.513276317"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.656263 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8mgfv"]
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.656434 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" podStartSLOduration=138.656407875 podStartE2EDuration="2m18.656407875s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:08.652647285 +0000 UTC m=+159.538690926" watchObservedRunningTime="2025-12-04 15:05:08.656407875 +0000 UTC m=+159.542451516"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.657901 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8mgfv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.682946 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8mgfv"]
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.705976 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 15:05:08 crc kubenswrapper[4946]: E1204 15:05:08.706347 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:09.206283524 +0000 UTC m=+160.092327175 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.707162 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.707209 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca7d43a3-9406-4810-9105-ede64b23375e-utilities\") pod \"community-operators-fqqvb\" (UID: \"ca7d43a3-9406-4810-9105-ede64b23375e\") " pod="openshift-marketplace/community-operators-fqqvb"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.707265 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c9f7504-90bf-4e33-be97-43f5d81896ae-catalog-content\") pod \"certified-operators-8mgfv\" (UID: \"8c9f7504-90bf-4e33-be97-43f5d81896ae\") " pod="openshift-marketplace/certified-operators-8mgfv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.707414 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfh42\" (UniqueName: \"kubernetes.io/projected/ca7d43a3-9406-4810-9105-ede64b23375e-kube-api-access-kfh42\") pod \"community-operators-fqqvb\" (UID: \"ca7d43a3-9406-4810-9105-ede64b23375e\") " pod="openshift-marketplace/community-operators-fqqvb"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.707480 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5jk2\" (UniqueName: \"kubernetes.io/projected/8c9f7504-90bf-4e33-be97-43f5d81896ae-kube-api-access-t5jk2\") pod \"certified-operators-8mgfv\" (UID: \"8c9f7504-90bf-4e33-be97-43f5d81896ae\") " pod="openshift-marketplace/certified-operators-8mgfv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.707569 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c9f7504-90bf-4e33-be97-43f5d81896ae-utilities\") pod \"certified-operators-8mgfv\" (UID: \"8c9f7504-90bf-4e33-be97-43f5d81896ae\") " pod="openshift-marketplace/certified-operators-8mgfv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.707667 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca7d43a3-9406-4810-9105-ede64b23375e-catalog-content\") pod \"community-operators-fqqvb\" (UID: \"ca7d43a3-9406-4810-9105-ede64b23375e\") " pod="openshift-marketplace/community-operators-fqqvb"
Dec 04 15:05:08 crc kubenswrapper[4946]: E1204 15:05:08.714222 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:09.214155044 +0000 UTC m=+160.100198685 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.714787 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca7d43a3-9406-4810-9105-ede64b23375e-utilities\") pod \"community-operators-fqqvb\" (UID: \"ca7d43a3-9406-4810-9105-ede64b23375e\") " pod="openshift-marketplace/community-operators-fqqvb"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.716567 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca7d43a3-9406-4810-9105-ede64b23375e-catalog-content\") pod \"community-operators-fqqvb\" (UID: \"ca7d43a3-9406-4810-9105-ede64b23375e\") " pod="openshift-marketplace/community-operators-fqqvb"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.749759 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-qdrtz" podStartSLOduration=9.749740228 podStartE2EDuration="9.749740228s" podCreationTimestamp="2025-12-04 15:04:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:08.718504013 +0000 UTC m=+159.604547654" watchObservedRunningTime="2025-12-04 15:05:08.749740228 +0000 UTC m=+159.635783869"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.752580 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-zzdp9" podStartSLOduration=138.752566768 podStartE2EDuration="2m18.752566768s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:08.750666167 +0000 UTC m=+159.636709808" watchObservedRunningTime="2025-12-04 15:05:08.752566768 +0000 UTC m=+159.638610409"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.792742 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfh42\" (UniqueName: \"kubernetes.io/projected/ca7d43a3-9406-4810-9105-ede64b23375e-kube-api-access-kfh42\") pod \"community-operators-fqqvb\" (UID: \"ca7d43a3-9406-4810-9105-ede64b23375e\") " pod="openshift-marketplace/community-operators-fqqvb"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.812457 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.812897 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c9f7504-90bf-4e33-be97-43f5d81896ae-catalog-content\") pod \"certified-operators-8mgfv\" (UID: \"8c9f7504-90bf-4e33-be97-43f5d81896ae\") " pod="openshift-marketplace/certified-operators-8mgfv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.812958 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5jk2\" (UniqueName: \"kubernetes.io/projected/8c9f7504-90bf-4e33-be97-43f5d81896ae-kube-api-access-t5jk2\") pod \"certified-operators-8mgfv\" (UID: \"8c9f7504-90bf-4e33-be97-43f5d81896ae\") " pod="openshift-marketplace/certified-operators-8mgfv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.812990 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c9f7504-90bf-4e33-be97-43f5d81896ae-utilities\") pod \"certified-operators-8mgfv\" (UID: \"8c9f7504-90bf-4e33-be97-43f5d81896ae\") " pod="openshift-marketplace/certified-operators-8mgfv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.813634 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c9f7504-90bf-4e33-be97-43f5d81896ae-utilities\") pod \"certified-operators-8mgfv\" (UID: \"8c9f7504-90bf-4e33-be97-43f5d81896ae\") " pod="openshift-marketplace/certified-operators-8mgfv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.813946 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c9f7504-90bf-4e33-be97-43f5d81896ae-catalog-content\") pod \"certified-operators-8mgfv\" (UID: \"8c9f7504-90bf-4e33-be97-43f5d81896ae\") " pod="openshift-marketplace/certified-operators-8mgfv"
Dec 04 15:05:08 crc kubenswrapper[4946]: E1204 15:05:08.814078 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:09.314052606 +0000 UTC m=+160.200096247 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.819529 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7mzks"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.839288 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5jk2\" (UniqueName: \"kubernetes.io/projected/8c9f7504-90bf-4e33-be97-43f5d81896ae-kube-api-access-t5jk2\") pod \"certified-operators-8mgfv\" (UID: \"8c9f7504-90bf-4e33-be97-43f5d81896ae\") " pod="openshift-marketplace/certified-operators-8mgfv"
Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.899683 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fqqvb"
Need to start a new one" pod="openshift-marketplace/community-operators-fqqvb" Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.914341 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:08 crc kubenswrapper[4946]: E1204 15:05:08.914757 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:09.414738683 +0000 UTC m=+160.300782324 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:08 crc kubenswrapper[4946]: I1204 15:05:08.981928 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8mgfv" Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.017909 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:09 crc kubenswrapper[4946]: E1204 15:05:09.018160 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:09.518125866 +0000 UTC m=+160.404169507 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.018291 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:09 crc kubenswrapper[4946]: E1204 15:05:09.018711 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-04 15:05:09.518695534 +0000 UTC m=+160.404739175 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.079325 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-df2mv"] Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.120486 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:09 crc kubenswrapper[4946]: E1204 15:05:09.121202 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:09.621171888 +0000 UTC m=+160.507215529 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.121441 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:09 crc kubenswrapper[4946]: E1204 15:05:09.122285 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:09.622262662 +0000 UTC m=+160.508306313 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.135695 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.139726 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.139781 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.200523 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7mzks"] Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.223723 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:09 crc kubenswrapper[4946]: E1204 15:05:09.224093 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:09.724068355 +0000 UTC m=+160.610111996 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:09 crc kubenswrapper[4946]: W1204 15:05:09.264671 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfec9d9bd_a20b_4625_9070_19949999c206.slice/crio-464c187576d5d7a083a8305483aed39b775960980b681b0f6b7ccb91bae11042 WatchSource:0}: Error finding container 464c187576d5d7a083a8305483aed39b775960980b681b0f6b7ccb91bae11042: Status 404 returned error can't find the container with id 464c187576d5d7a083a8305483aed39b775960980b681b0f6b7ccb91bae11042 Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.325570 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:09 crc kubenswrapper[4946]: E1204 15:05:09.325920 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:09.825907348 +0000 UTC m=+160.711950989 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.335980 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fqqvb"] Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.426597 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:09 crc kubenswrapper[4946]: E1204 15:05:09.427075 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:09.927037649 +0000 UTC m=+160.813081290 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.427356 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:09 crc kubenswrapper[4946]: E1204 15:05:09.427805 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:09.927790653 +0000 UTC m=+160.813834294 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.528552 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:09 crc kubenswrapper[4946]: E1204 15:05:09.529374 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:10.029325477 +0000 UTC m=+160.915369118 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.559421 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8mgfv"] Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.632222 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:09 crc kubenswrapper[4946]: E1204 15:05:09.632700 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:10.132677609 +0000 UTC m=+161.018721250 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.656656 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb" event={"ID":"4f02848c-6b07-4c72-8753-c34c4a3f210f","Type":"ContainerStarted","Data":"1057fe578bb399ece0ba63bc4e7970810c1ed401eeae9fc77f760abfa831a0a4"} Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.661773 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" event={"ID":"02de3a18-59d7-48c0-bf9c-d40c09ed8cee","Type":"ContainerStarted","Data":"6ebf73ab902b371237ebee0052503c24a8656af082c62b7be81eb5296c96fde6"} Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.672650 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-v66dj" event={"ID":"447805f5-6492-4c42-95a5-ebfd9af1cf87","Type":"ContainerStarted","Data":"d4dff1050623a609fda93ecb61808a6c99f335db8743a0f70a2f38b0697d637b"} Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.682611 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vsskd" event={"ID":"ca64a188-72ed-4efc-820f-507e4f6e1d35","Type":"ContainerStarted","Data":"d0a221baa1130cd042e6b793117f12005fb2ee16b2ce3e58bc038c8226c6c90c"} Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.692347 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-df2mv" 
event={"ID":"fec9d9bd-a20b-4625-9070-19949999c206","Type":"ContainerStarted","Data":"464c187576d5d7a083a8305483aed39b775960980b681b0f6b7ccb91bae11042"} Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.733691 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:09 crc kubenswrapper[4946]: E1204 15:05:09.734548 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:10.234519562 +0000 UTC m=+161.120563203 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.735846 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" event={"ID":"cf2f88d9-df9c-4d4a-a1a1-6ab46612186e","Type":"ContainerStarted","Data":"814b1baa04b33fbcf211b1b8b9a79ca4f4ed01cadffb44120fdd4da3a2deac77"} Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.779235 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mzks" event={"ID":"73b58e95-46d5-468b-9890-a4fc3c5a0bde","Type":"ContainerStarted","Data":"5c906136ec926c0b895114e8943d633858caae888c00d50dd48d8ec2ed22cf2c"} Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.814637 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xwt27" event={"ID":"18864978-0492-4497-913e-283bf542b579","Type":"ContainerStarted","Data":"9ec8b4f973f4034ea4bf4aeb8c02486025b8f67b65501f2e85a8743a27620f25"} Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.822723 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nbwkf" event={"ID":"650a1e58-9737-4c8a-b9aa-5529ca970fa6","Type":"ContainerStarted","Data":"232d3dd5f1336cc02a38c9ac579ff21931b195077afc883a27f6bac85115ad48"} Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.827664 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cjfn2" event={"ID":"5b24b049-2010-4a5d-813a-55a70bc07eaf","Type":"ContainerStarted","Data":"fbb9185db3586f161d3b48720ef4b9de8aa2f4f29835cc824bfc0c736a7e25e0"} Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.835177 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:09 crc kubenswrapper[4946]: E1204 15:05:09.837201 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:10.337187382 +0000 UTC m=+161.223231023 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.848835 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" event={"ID":"55893ee8-f0d7-4019-9522-45e8db696972","Type":"ContainerStarted","Data":"d3dbfc0f54ad157d89818ebe1718ba407757b75f26c0c3616acd2d2c55f4c3e4"} Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.911547 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8mgfv" event={"ID":"8c9f7504-90bf-4e33-be97-43f5d81896ae","Type":"ContainerStarted","Data":"21adf73d36c612f99c8e2fb06eac64f367075b93414294c87475cf632e214281"} Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.935093 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-q85sw" event={"ID":"90dd2744-a408-4164-bd61-88f44a4dc1ef","Type":"ContainerStarted","Data":"4d70c8ced96f15cbb5290b39a10d2ca2f2c85d41b588d04044cbdf82119b13b3"} Dec 04 15:05:09 crc kubenswrapper[4946]: I1204 15:05:09.938920 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:09 crc kubenswrapper[4946]: E1204 15:05:09.940203 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:10.440186213 +0000 UTC m=+161.326229854 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.016767 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" event={"ID":"d0437519-c01e-4b89-a007-8fda5902ea9f","Type":"ContainerStarted","Data":"ea74667302b505bd6d90688efcbea14a75c2af7c7ac1da13c650c8be1dc8067d"} Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.034409 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqqvb" event={"ID":"ca7d43a3-9406-4810-9105-ede64b23375e","Type":"ContainerStarted","Data":"1173293872eb5ef463209b4d3c86e38c44b225cd33be064e7f2a89d9555946d4"} Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.040838 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:10 crc kubenswrapper[4946]: E1204 15:05:10.042529 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:10.542508542 +0000 UTC m=+161.428552173 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.043908 4946 generic.go:334] "Generic (PLEG): container finished" podID="9c28e21c-79cb-4fe0-b8f3-247fbce0640c" containerID="4656d0bedaa51169d870af586db6eaea2abeb8f959abfaf3444ed036ea78c29c" exitCode=0 Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.043996 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb" event={"ID":"9c28e21c-79cb-4fe0-b8f3-247fbce0640c","Type":"ContainerDied","Data":"4656d0bedaa51169d870af586db6eaea2abeb8f959abfaf3444ed036ea78c29c"} Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.058465 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vrlxj"] Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.059606 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vrlxj" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.063565 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.063768 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" event={"ID":"65dc1ade-ddd4-4a22-99bd-780112f318f9","Type":"ContainerStarted","Data":"507bc95ca05de023389fdb55fc743cf1424e634fd6b675813f88c34e669d2947"} Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.071980 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrlxj"] Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.085074 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" event={"ID":"42d67fa8-e84e-4a09-a51d-c63365c274c5","Type":"ContainerStarted","Data":"374cab0427dbb6c4c228a9135c9273b6330f471bac33c642626a38f99af84cdf"} Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.107180 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d" event={"ID":"21f370a9-11c1-4b16-9610-ced611035357","Type":"ContainerStarted","Data":"0d59ec08bde7cff79d6bc735bf56037771aa292a9425409fbdd45abe24523917"} Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.110368 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jw77k" event={"ID":"d0d0d8ae-0f47-4929-855b-60798f0d6bd3","Type":"ContainerStarted","Data":"705e5441e70c7ae56e9d502e2f553792e9251409a66e03ff941671dab60779d6"} Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.143837 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-6w5k9" event={"ID":"3539ee4e-f397-45e0-b449-93b150766448","Type":"ContainerStarted","Data":"8bd96c19f0b6114fdd51384ee772099106ad49dec191512be93f35b6cc37969c"} Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.145296 4946 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-jvfnf container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:5443/healthz\": dial tcp 10.217.0.24:5443: connect: connection refused" start-of-body= Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.145370 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" podUID="44d826fd-fefe-496b-9ee3-b6ea83d8227a" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.24:5443/healthz\": dial tcp 10.217.0.24:5443: connect: connection refused" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.153451 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-w56q9" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.169105 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:10 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:10 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:10 crc 
kubenswrapper[4946]: healthz check failed Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.169201 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.169519 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.169928 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxtzx\" (UniqueName: \"kubernetes.io/projected/7a98b449-5e32-4f53-8829-fc2d01b603b8-kube-api-access-cxtzx\") pod \"redhat-marketplace-vrlxj\" (UID: \"7a98b449-5e32-4f53-8829-fc2d01b603b8\") " pod="openshift-marketplace/redhat-marketplace-vrlxj" Dec 04 15:05:10 crc kubenswrapper[4946]: E1204 15:05:10.170065 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:10.670036703 +0000 UTC m=+161.556080344 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.171501 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.171680 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a98b449-5e32-4f53-8829-fc2d01b603b8-catalog-content\") pod \"redhat-marketplace-vrlxj\" (UID: \"7a98b449-5e32-4f53-8829-fc2d01b603b8\") " pod="openshift-marketplace/redhat-marketplace-vrlxj" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.171958 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a98b449-5e32-4f53-8829-fc2d01b603b8-utilities\") pod \"redhat-marketplace-vrlxj\" (UID: \"7a98b449-5e32-4f53-8829-fc2d01b603b8\") " pod="openshift-marketplace/redhat-marketplace-vrlxj" Dec 04 15:05:10 crc kubenswrapper[4946]: E1204 15:05:10.179253 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-04 15:05:10.679226036 +0000 UTC m=+161.565269677 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.192234 4946 patch_prober.go:28] interesting pod/downloads-7954f5f757-w56q9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body= Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.195249 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-w56q9" podUID="53433a4f-ccda-4c5c-9dca-7389ec6d741c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.284984 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.285723 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxtzx\" (UniqueName: \"kubernetes.io/projected/7a98b449-5e32-4f53-8829-fc2d01b603b8-kube-api-access-cxtzx\") pod \"redhat-marketplace-vrlxj\" (UID: \"7a98b449-5e32-4f53-8829-fc2d01b603b8\") " pod="openshift-marketplace/redhat-marketplace-vrlxj" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.285951 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a98b449-5e32-4f53-8829-fc2d01b603b8-catalog-content\") pod \"redhat-marketplace-vrlxj\" (UID: \"7a98b449-5e32-4f53-8829-fc2d01b603b8\") " pod="openshift-marketplace/redhat-marketplace-vrlxj" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.286000 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a98b449-5e32-4f53-8829-fc2d01b603b8-utilities\") pod \"redhat-marketplace-vrlxj\" (UID: \"7a98b449-5e32-4f53-8829-fc2d01b603b8\") " pod="openshift-marketplace/redhat-marketplace-vrlxj" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.286419 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a98b449-5e32-4f53-8829-fc2d01b603b8-utilities\") pod \"redhat-marketplace-vrlxj\" (UID: \"7a98b449-5e32-4f53-8829-fc2d01b603b8\") " pod="openshift-marketplace/redhat-marketplace-vrlxj" Dec 04 15:05:10 crc kubenswrapper[4946]: E1204 15:05:10.286490 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-04 15:05:10.786474372 +0000 UTC m=+161.672518013 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.295147 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a98b449-5e32-4f53-8829-fc2d01b603b8-catalog-content\") pod \"redhat-marketplace-vrlxj\" (UID: \"7a98b449-5e32-4f53-8829-fc2d01b603b8\") " pod="openshift-marketplace/redhat-marketplace-vrlxj" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.333013 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxtzx\" (UniqueName: \"kubernetes.io/projected/7a98b449-5e32-4f53-8829-fc2d01b603b8-kube-api-access-cxtzx\") pod \"redhat-marketplace-vrlxj\" (UID: \"7a98b449-5e32-4f53-8829-fc2d01b603b8\") " pod="openshift-marketplace/redhat-marketplace-vrlxj" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.387778 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:10 crc kubenswrapper[4946]: E1204 15:05:10.388150 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:10.88813619 +0000 UTC m=+161.774179831 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.400229 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vrlxj" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.401300 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-w56q9" podStartSLOduration=140.401279318 podStartE2EDuration="2m20.401279318s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:10.397109805 +0000 UTC m=+161.283153446" watchObservedRunningTime="2025-12-04 15:05:10.401279318 +0000 UTC m=+161.287322959" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.473881 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" podStartSLOduration=140.47386333 podStartE2EDuration="2m20.47386333s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:10.461993762 +0000 UTC m=+161.348037403" watchObservedRunningTime="2025-12-04 15:05:10.47386333 +0000 UTC m=+161.359906971" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.474900 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-p65s4"] Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.486979 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p65s4" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.495918 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:10 crc kubenswrapper[4946]: E1204 15:05:10.496385 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:10.996365757 +0000 UTC m=+161.882409388 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.510901 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p65s4"] Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.596597 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" podStartSLOduration=140.596570948 podStartE2EDuration="2m20.596570948s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:10.568850305 +0000 UTC m=+161.454893956" watchObservedRunningTime="2025-12-04 15:05:10.596570948 +0000 UTC m=+161.482614589" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.611625 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c9bd510-5d62-4814-bd88-62c5a3051f9d-utilities\") pod \"redhat-marketplace-p65s4\" (UID: \"9c9bd510-5d62-4814-bd88-62c5a3051f9d\") " pod="openshift-marketplace/redhat-marketplace-p65s4" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.611821 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkx6z\" (UniqueName: \"kubernetes.io/projected/9c9bd510-5d62-4814-bd88-62c5a3051f9d-kube-api-access-rkx6z\") pod \"redhat-marketplace-p65s4\" (UID: \"9c9bd510-5d62-4814-bd88-62c5a3051f9d\") " pod="openshift-marketplace/redhat-marketplace-p65s4" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.611873 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.611969 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c9bd510-5d62-4814-bd88-62c5a3051f9d-catalog-content\") pod \"redhat-marketplace-p65s4\" (UID: \"9c9bd510-5d62-4814-bd88-62c5a3051f9d\") " pod="openshift-marketplace/redhat-marketplace-p65s4" Dec 04 15:05:10 crc kubenswrapper[4946]: E1204 15:05:10.612417 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:11.112402702 +0000 UTC m=+161.998446333 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.618976 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.619025 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.629665 4946 patch_prober.go:28] interesting pod/apiserver-76f77b778f-z8dg2 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.6:8443/livez\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.629737 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" podUID="d0437519-c01e-4b89-a007-8fda5902ea9f" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.6:8443/livez\": dial tcp 10.217.0.6:8443: connect: connection refused" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.652601 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmpsb" podStartSLOduration=140.652575672 podStartE2EDuration="2m20.652575672s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:10.630152548 +0000 UTC m=+161.516196189" watchObservedRunningTime="2025-12-04 15:05:10.652575672 +0000 UTC m=+161.538619313" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.693325 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-6w5k9" podStartSLOduration=140.693286349 podStartE2EDuration="2m20.693286349s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:10.683166186 +0000 UTC m=+161.569209827" watchObservedRunningTime="2025-12-04 15:05:10.693286349 +0000 UTC m=+161.579330070" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.715509 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:10 crc kubenswrapper[4946]: E1204 15:05:10.720421 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:11.220388802 +0000 UTC m=+162.106432443 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.725225 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-9qzcp" podStartSLOduration=11.725192025 podStartE2EDuration="11.725192025s" podCreationTimestamp="2025-12-04 15:04:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:10.707492091 +0000 UTC m=+161.593535732" watchObservedRunningTime="2025-12-04 15:05:10.725192025 +0000 UTC m=+161.611235676" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.730812 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.731245 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c9bd510-5d62-4814-bd88-62c5a3051f9d-catalog-content\") pod \"redhat-marketplace-p65s4\" (UID: \"9c9bd510-5d62-4814-bd88-62c5a3051f9d\") " pod="openshift-marketplace/redhat-marketplace-p65s4" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.731374 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c9bd510-5d62-4814-bd88-62c5a3051f9d-utilities\") pod \"redhat-marketplace-p65s4\" (UID: \"9c9bd510-5d62-4814-bd88-62c5a3051f9d\") " pod="openshift-marketplace/redhat-marketplace-p65s4" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.731529 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkx6z\" (UniqueName: \"kubernetes.io/projected/9c9bd510-5d62-4814-bd88-62c5a3051f9d-kube-api-access-rkx6z\") pod \"redhat-marketplace-p65s4\" (UID: \"9c9bd510-5d62-4814-bd88-62c5a3051f9d\") " pod="openshift-marketplace/redhat-marketplace-p65s4" Dec 04 15:05:10 crc kubenswrapper[4946]: E1204 15:05:10.732216 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:11.232199458 +0000 UTC m=+162.118243099 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.732931 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c9bd510-5d62-4814-bd88-62c5a3051f9d-catalog-content\") pod \"redhat-marketplace-p65s4\" (UID: \"9c9bd510-5d62-4814-bd88-62c5a3051f9d\") " pod="openshift-marketplace/redhat-marketplace-p65s4" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.737234 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c9bd510-5d62-4814-bd88-62c5a3051f9d-utilities\") pod \"redhat-marketplace-p65s4\" (UID: \"9c9bd510-5d62-4814-bd88-62c5a3051f9d\") " pod="openshift-marketplace/redhat-marketplace-p65s4" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.736681 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jw77k" podStartSLOduration=140.735658318 podStartE2EDuration="2m20.735658318s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:10.730089771 +0000 UTC m=+161.616133432" watchObservedRunningTime="2025-12-04 15:05:10.735658318 +0000 UTC m=+161.621701959" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.752773 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-cjfn2" podStartSLOduration=140.752753733 podStartE2EDuration="2m20.752753733s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:10.751342328 +0000 UTC m=+161.637385969" watchObservedRunningTime="2025-12-04 15:05:10.752753733 +0000 UTC m=+161.638797374" Dec 04 15:05:10 crc kubenswrapper[4946]: E1204 15:05:10.754177 4946 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c9f7504_90bf_4e33_be97_43f5d81896ae.slice/crio-conmon-2fa459994684bb66d3cf1cb5cb34ddddccc157e449962144dc5b25f9178863ef.scope\": RecentStats: unable to find data in memory cache]" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.786940 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkx6z\" (UniqueName: \"kubernetes.io/projected/9c9bd510-5d62-4814-bd88-62c5a3051f9d-kube-api-access-rkx6z\") pod \"redhat-marketplace-p65s4\" (UID: \"9c9bd510-5d62-4814-bd88-62c5a3051f9d\") " pod="openshift-marketplace/redhat-marketplace-p65s4" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.812552 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gst2d" podStartSLOduration=140.812524206 
podStartE2EDuration="2m20.812524206s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:10.787580602 +0000 UTC m=+161.673624243" watchObservedRunningTime="2025-12-04 15:05:10.812524206 +0000 UTC m=+161.698567847" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.828478 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-lg2p8" podStartSLOduration=140.828457694 podStartE2EDuration="2m20.828457694s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:10.827863795 +0000 UTC m=+161.713907446" watchObservedRunningTime="2025-12-04 15:05:10.828457694 +0000 UTC m=+161.714501335" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.832901 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:10 crc kubenswrapper[4946]: E1204 15:05:10.833430 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:11.333406481 +0000 UTC m=+162.219450122 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.901051 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p65s4" Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.935446 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:10 crc kubenswrapper[4946]: E1204 15:05:10.936018 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:11.435997569 +0000 UTC m=+162.322041210 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:10 crc kubenswrapper[4946]: I1204 15:05:10.954956 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrlxj"] Dec 04 15:05:11 crc kubenswrapper[4946]: W1204 15:05:11.027642 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a98b449_5e32_4f53_8829_fc2d01b603b8.slice/crio-bfa77449c22519cdb969dcb3682a9fed5102932a72a8d1d8e7a3a0f34fb91054 WatchSource:0}: Error finding container bfa77449c22519cdb969dcb3682a9fed5102932a72a8d1d8e7a3a0f34fb91054: Status 404 returned error can't find the container with id bfa77449c22519cdb969dcb3682a9fed5102932a72a8d1d8e7a3a0f34fb91054 Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.036708 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:11 crc kubenswrapper[4946]: E1204 15:05:11.036920 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:11.536870342 +0000 UTC m=+162.422913983 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.037009 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:11 crc kubenswrapper[4946]: E1204 15:05:11.037453 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:11.53743835 +0000 UTC m=+162.423481991 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.139952 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:11 crc kubenswrapper[4946]: E1204 15:05:11.140486 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:11.640459891 +0000 UTC m=+162.526503532 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.144886 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:11 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:11 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:11 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.144944 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.187151 4946 generic.go:334] "Generic (PLEG): container finished" podID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" containerID="60edc69588b32fe369e4a8adcd1d3ac406044918c6dee59389db4d7ecbcb0257" exitCode=0 Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.187272 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mzks" event={"ID":"73b58e95-46d5-468b-9890-a4fc3c5a0bde","Type":"ContainerDied","Data":"60edc69588b32fe369e4a8adcd1d3ac406044918c6dee59389db4d7ecbcb0257"} Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.192948 4946 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.211418 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" 
event={"ID":"3c6657f5-af17-443b-882d-3e345029eac5","Type":"ContainerStarted","Data":"fb9977d02acb6f9b8968f9a4386dd2ade805e3de2e4ece02cab1c915586c519d"} Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.230705 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" event={"ID":"65dc1ade-ddd4-4a22-99bd-780112f318f9","Type":"ContainerStarted","Data":"cb83cf11e7b6501417b30d7e3b64d194964ac337ad4d79ccb11f9e14d33489e6"} Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.243391 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:11 crc kubenswrapper[4946]: E1204 15:05:11.243863 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:11.743846244 +0000 UTC m=+162.629889885 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.268632 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-q85sw" event={"ID":"90dd2744-a408-4164-bd61-88f44a4dc1ef","Type":"ContainerStarted","Data":"cbbfbac809d3c53959176cded22463ee5cb41aae9e174130104d926e93be24fe"} Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.274032 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kr77j"] Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.283983 4946 generic.go:334] "Generic (PLEG): container finished" podID="fec9d9bd-a20b-4625-9070-19949999c206" containerID="01e4936e75999e2d560020435cb98c8f4cdc538ba011f2212c658784b5ffd270" exitCode=0 Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.286368 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-df2mv" event={"ID":"fec9d9bd-a20b-4625-9070-19949999c206","Type":"ContainerDied","Data":"01e4936e75999e2d560020435cb98c8f4cdc538ba011f2212c658784b5ffd270"} Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.286446 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kr77j" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.293770 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.312348 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-7bzmc" podStartSLOduration=141.312323045 podStartE2EDuration="2m21.312323045s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:11.305646282 +0000 UTC m=+162.191689923" watchObservedRunningTime="2025-12-04 15:05:11.312323045 +0000 UTC m=+162.198366686" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.314898 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kr77j"] Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.323276 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc" event={"ID":"4ed440a8-a3c7-48da-9811-bcc77750303a","Type":"ContainerStarted","Data":"a4f5a7214dfa2d3d26432a8fd66446a2b70578d24624e93edc05e97d1836af7c"} Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.345540 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.345991 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74c8940c-1ed3-4aaa-94aa-0623f25f008e-catalog-content\") pod \"redhat-operators-kr77j\" (UID: \"74c8940c-1ed3-4aaa-94aa-0623f25f008e\") " pod="openshift-marketplace/redhat-operators-kr77j" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.346049 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd86j\" (UniqueName: \"kubernetes.io/projected/74c8940c-1ed3-4aaa-94aa-0623f25f008e-kube-api-access-rd86j\") pod \"redhat-operators-kr77j\" (UID: \"74c8940c-1ed3-4aaa-94aa-0623f25f008e\") " pod="openshift-marketplace/redhat-operators-kr77j" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.346085 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74c8940c-1ed3-4aaa-94aa-0623f25f008e-utilities\") pod \"redhat-operators-kr77j\" (UID: \"74c8940c-1ed3-4aaa-94aa-0623f25f008e\") " pod="openshift-marketplace/redhat-operators-kr77j" Dec 04 15:05:11 crc kubenswrapper[4946]: E1204 15:05:11.347090 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:11.847058441 +0000 UTC m=+162.733102082 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.366625 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-w8nz2" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.396305 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrlxj" event={"ID":"7a98b449-5e32-4f53-8829-fc2d01b603b8","Type":"ContainerStarted","Data":"bfa77449c22519cdb969dcb3682a9fed5102932a72a8d1d8e7a3a0f34fb91054"} Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.434593 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg" event={"ID":"bbd993f8-1caa-4fc1-9d39-a9524fe1e48b","Type":"ContainerStarted","Data":"d93dbbad2064b0b150513328de14c0dfd999e69abc7ba560d521de7c32105243"} Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.449976 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74c8940c-1ed3-4aaa-94aa-0623f25f008e-catalog-content\") pod \"redhat-operators-kr77j\" (UID: \"74c8940c-1ed3-4aaa-94aa-0623f25f008e\") " pod="openshift-marketplace/redhat-operators-kr77j" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.450297 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd86j\" (UniqueName: \"kubernetes.io/projected/74c8940c-1ed3-4aaa-94aa-0623f25f008e-kube-api-access-rd86j\") pod \"redhat-operators-kr77j\" (UID: \"74c8940c-1ed3-4aaa-94aa-0623f25f008e\") " pod="openshift-marketplace/redhat-operators-kr77j" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.450382 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74c8940c-1ed3-4aaa-94aa-0623f25f008e-utilities\") pod \"redhat-operators-kr77j\" (UID: \"74c8940c-1ed3-4aaa-94aa-0623f25f008e\") " pod="openshift-marketplace/redhat-operators-kr77j" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.450554 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:11 crc kubenswrapper[4946]: E1204 15:05:11.450965 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:11.95094986 +0000 UTC m=+162.836993501 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.452154 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74c8940c-1ed3-4aaa-94aa-0623f25f008e-utilities\") pod \"redhat-operators-kr77j\" (UID: \"74c8940c-1ed3-4aaa-94aa-0623f25f008e\") " pod="openshift-marketplace/redhat-operators-kr77j" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.452687 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74c8940c-1ed3-4aaa-94aa-0623f25f008e-catalog-content\") pod \"redhat-operators-kr77j\" (UID: \"74c8940c-1ed3-4aaa-94aa-0623f25f008e\") " pod="openshift-marketplace/redhat-operators-kr77j" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.488021 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rd86j\" (UniqueName: \"kubernetes.io/projected/74c8940c-1ed3-4aaa-94aa-0623f25f008e-kube-api-access-rd86j\") pod \"redhat-operators-kr77j\" (UID: \"74c8940c-1ed3-4aaa-94aa-0623f25f008e\") " pod="openshift-marketplace/redhat-operators-kr77j" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.508837 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-fs4wh" event={"ID":"bf57c392-1dbe-4c73-96ea-98146b808571","Type":"ContainerStarted","Data":"fbf6dd0a8c8f9fbe41653e7e24d434ca3b9d8f2dc238b4b6f82412c173a3efd2"} Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.511626 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-q85sw" podStartSLOduration=141.511597252 podStartE2EDuration="2m21.511597252s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:11.511590421 +0000 UTC m=+162.397634062" watchObservedRunningTime="2025-12-04 15:05:11.511597252 +0000 UTC m=+162.397640893" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.512721 4946 generic.go:334] "Generic (PLEG): container finished" podID="ca7d43a3-9406-4810-9105-ede64b23375e" containerID="93f940ca1cc04727811d4e6c3a33875f2621906ca18d6494efd6052d5d565f54" exitCode=0 Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.512781 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqqvb" event={"ID":"ca7d43a3-9406-4810-9105-ede64b23375e","Type":"ContainerDied","Data":"93f940ca1cc04727811d4e6c3a33875f2621906ca18d6494efd6052d5d565f54"} Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.534349 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d" event={"ID":"aca3dc1d-be1a-49f6-86e8-0addede99412","Type":"ContainerStarted","Data":"85280d968b6124459d71f7ea138e9c52bd12251c0f7e92adac4d25f8cd3169e6"} Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.554530 4946 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:11 crc kubenswrapper[4946]: E1204 15:05:11.556081 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:12.056060258 +0000 UTC m=+162.942103899 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.575427 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-fs4wh" podStartSLOduration=141.558109853 podStartE2EDuration="2m21.558109853s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:11.557096671 +0000 UTC m=+162.443140322" watchObservedRunningTime="2025-12-04 15:05:11.558109853 +0000 UTC m=+162.444153494" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.589273 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-7tl7m" event={"ID":"8b20a593-dece-40b0-ae3f-12a9fabbf3e1","Type":"ContainerStarted","Data":"f89160dd35613e851933aa3f7b92df0dffc12730862d9a91c35f19a033c06edb"} Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.632342 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kr77j" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.636472 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.636544 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.638993 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2bxm7" event={"ID":"7f0be614-bbcc-46e2-b1ee-2944e087d3f4","Type":"ContainerStarted","Data":"23d958019707850a44b83d5f012e60d56fce97e3eb4c3a7e034e9709f4656244"} Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.657338 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:11 crc kubenswrapper[4946]: E1204 15:05:11.659658 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:12.159642917 +0000 UTC m=+163.045686558 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.677775 4946 generic.go:334] "Generic (PLEG): container finished" podID="8c9f7504-90bf-4e33-be97-43f5d81896ae" containerID="2fa459994684bb66d3cf1cb5cb34ddddccc157e449962144dc5b25f9178863ef" exitCode=0 Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.678808 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8mgfv" event={"ID":"8c9f7504-90bf-4e33-be97-43f5d81896ae","Type":"ContainerDied","Data":"2fa459994684bb66d3cf1cb5cb34ddddccc157e449962144dc5b25f9178863ef"} Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.680873 4946 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-d6qcv container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 04 15:05:11 crc kubenswrapper[4946]: [+]log ok Dec 04 15:05:11 crc kubenswrapper[4946]: [+]etcd ok Dec 04 15:05:11 crc kubenswrapper[4946]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 04 15:05:11 crc kubenswrapper[4946]: [-]poststarthook/generic-apiserver-start-informers failed: reason withheld Dec 04 15:05:11 crc kubenswrapper[4946]: [+]poststarthook/max-in-flight-filter ok Dec 04 15:05:11 crc kubenswrapper[4946]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 04 15:05:11 crc kubenswrapper[4946]: [+]poststarthook/openshift.io-StartUserInformer ok Dec 04 15:05:11 
crc kubenswrapper[4946]: [+]poststarthook/openshift.io-StartOAuthInformer ok Dec 04 15:05:11 crc kubenswrapper[4946]: [+]poststarthook/openshift.io-StartTokenTimeoutUpdater ok Dec 04 15:05:11 crc kubenswrapper[4946]: livez check failed Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.680953 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" podUID="42d67fa8-e84e-4a09-a51d-c63365c274c5" containerName="oauth-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.685940 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-47szc" podStartSLOduration=141.685919774 podStartE2EDuration="2m21.685919774s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:11.677444844 +0000 UTC m=+162.563488495" watchObservedRunningTime="2025-12-04 15:05:11.685919774 +0000 UTC m=+162.571963415" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.691888 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m666j"] Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.693000 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m666j" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.712075 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m666j"] Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.717424 4946 generic.go:334] "Generic (PLEG): container finished" podID="18864978-0492-4497-913e-283bf542b579" containerID="9ec8b4f973f4034ea4bf4aeb8c02486025b8f67b65501f2e85a8743a27620f25" exitCode=0 Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.718539 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xwt27" event={"ID":"18864978-0492-4497-913e-283bf542b579","Type":"ContainerDied","Data":"9ec8b4f973f4034ea4bf4aeb8c02486025b8f67b65501f2e85a8743a27620f25"} Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.718582 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xwt27" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.718595 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xwt27" event={"ID":"18864978-0492-4497-913e-283bf542b579","Type":"ContainerStarted","Data":"594755046a796f7c1083aad4d39e2e73ed99d8f657abf9a95a29958dec71ae7a"} Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.762793 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.763828 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5-utilities\") pod \"redhat-operators-m666j\" (UID: 
\"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5\") " pod="openshift-marketplace/redhat-operators-m666j" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.763880 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5-catalog-content\") pod \"redhat-operators-m666j\" (UID: \"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5\") " pod="openshift-marketplace/redhat-operators-m666j" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.763913 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46l8s\" (UniqueName: \"kubernetes.io/projected/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5-kube-api-access-46l8s\") pod \"redhat-operators-m666j\" (UID: \"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5\") " pod="openshift-marketplace/redhat-operators-m666j" Dec 04 15:05:11 crc kubenswrapper[4946]: E1204 15:05:11.764313 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:12.264283289 +0000 UTC m=+163.150326930 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.765707 4946 patch_prober.go:28] interesting pod/downloads-7954f5f757-w56q9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body= Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.765753 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-w56q9" podUID="53433a4f-ccda-4c5c-9dca-7389ec6d741c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.766874 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.766906 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.771892 4946 patch_prober.go:28] interesting pod/console-f9d7485db-v4qw8 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body= Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.771963 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-v4qw8" podUID="12dc3c7b-da6c-46a0-b0c9-d0899e46837a" containerName="console" probeResult="failure" output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused" Dec 04 15:05:11 crc 
kubenswrapper[4946]: I1204 15:05:11.798447 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p65s4"] Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.834329 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s796b" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.868618 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5-utilities\") pod \"redhat-operators-m666j\" (UID: \"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5\") " pod="openshift-marketplace/redhat-operators-m666j" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.868876 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5-catalog-content\") pod \"redhat-operators-m666j\" (UID: \"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5\") " pod="openshift-marketplace/redhat-operators-m666j" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.869005 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46l8s\" (UniqueName: \"kubernetes.io/projected/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5-kube-api-access-46l8s\") pod \"redhat-operators-m666j\" (UID: \"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5\") " pod="openshift-marketplace/redhat-operators-m666j" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.869331 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.894948 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5-utilities\") pod \"redhat-operators-m666j\" (UID: \"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5\") " pod="openshift-marketplace/redhat-operators-m666j" Dec 04 15:05:11 crc kubenswrapper[4946]: E1204 15:05:11.897994 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:12.397969157 +0000 UTC m=+163.284012788 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.901473 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5-catalog-content\") pod \"redhat-operators-m666j\" (UID: \"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5\") " pod="openshift-marketplace/redhat-operators-m666j" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.926393 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.930491 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7gdsg" podStartSLOduration=141.930462782 podStartE2EDuration="2m21.930462782s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:11.797862609 +0000 UTC m=+162.683906250" watchObservedRunningTime="2025-12-04 15:05:11.930462782 +0000 UTC m=+162.816506423" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.931133 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jfnsb" podStartSLOduration=141.931108433 podStartE2EDuration="2m21.931108433s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:11.894365963 +0000 UTC m=+162.780409604" watchObservedRunningTime="2025-12-04 15:05:11.931108433 +0000 UTC m=+162.817152074" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.955509 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46l8s\" (UniqueName: \"kubernetes.io/projected/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5-kube-api-access-46l8s\") pod \"redhat-operators-m666j\" (UID: \"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5\") " pod="openshift-marketplace/redhat-operators-m666j" Dec 04 15:05:11 crc kubenswrapper[4946]: I1204 15:05:11.977677 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qhg9d" podStartSLOduration=141.977647005 podStartE2EDuration="2m21.977647005s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:11.977316714 +0000 UTC m=+162.863360375" watchObservedRunningTime="2025-12-04 15:05:11.977647005 +0000 UTC m=+162.863690646" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.000378 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-7tl7m" podStartSLOduration=142.000358218 podStartE2EDuration="2m22.000358218s" 
podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:11.959544178 +0000 UTC m=+162.845587819" watchObservedRunningTime="2025-12-04 15:05:12.000358218 +0000 UTC m=+162.886401859" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.003624 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:12 crc kubenswrapper[4946]: E1204 15:05:12.004242 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:12.504215941 +0000 UTC m=+163.390259592 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.030982 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" podStartSLOduration=142.030956113 podStartE2EDuration="2m22.030956113s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:12.020403047 +0000 UTC m=+162.906446688" watchObservedRunningTime="2025-12-04 15:05:12.030956113 +0000 UTC m=+162.916999754" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.057838 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-5fshb" podStartSLOduration=142.057816918 podStartE2EDuration="2m22.057816918s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:12.056855558 +0000 UTC m=+162.942899199" watchObservedRunningTime="2025-12-04 15:05:12.057816918 +0000 UTC m=+162.943860559" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.105580 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:12 crc kubenswrapper[4946]: E1204 15:05:12.106094 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-04 15:05:12.606060644 +0000 UTC m=+163.492104275 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.148363 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:12 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:12 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:12 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.148427 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.160462 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m666j" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.206845 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:12 crc kubenswrapper[4946]: E1204 15:05:12.207304 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:12.707270768 +0000 UTC m=+163.593314399 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.207721 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:12 crc kubenswrapper[4946]: E1204 15:05:12.208151 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-04 15:05:12.708137195 +0000 UTC m=+163.594180836 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.253673 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xwt27" podStartSLOduration=142.253647524 podStartE2EDuration="2m22.253647524s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:12.190425051 +0000 UTC m=+163.076468692" watchObservedRunningTime="2025-12-04 15:05:12.253647524 +0000 UTC m=+163.139691165" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.254014 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2bxm7" podStartSLOduration=142.254006526 podStartE2EDuration="2m22.254006526s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:12.250781463 +0000 UTC m=+163.136825104" watchObservedRunningTime="2025-12-04 15:05:12.254006526 +0000 UTC m=+163.140050167" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.313308 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vsskd" podStartSLOduration=142.313282004 podStartE2EDuration="2m22.313282004s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:12.283509876 +0000 UTC m=+163.169553517" watchObservedRunningTime="2025-12-04 15:05:12.313282004 +0000 UTC m=+163.199325645" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.314873 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:12 crc kubenswrapper[4946]: E1204 15:05:12.315475 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:12.815452353 +0000 UTC m=+163.701496004 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.393980 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.417008 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:12 crc kubenswrapper[4946]: E1204 15:05:12.417449 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:12.917419471 +0000 UTC m=+163.803463122 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.521541 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c28e21c-79cb-4fe0-b8f3-247fbce0640c-config-volume\") pod \"9c28e21c-79cb-4fe0-b8f3-247fbce0640c\" (UID: \"9c28e21c-79cb-4fe0-b8f3-247fbce0640c\") " Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.521621 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbmbg\" (UniqueName: \"kubernetes.io/projected/9c28e21c-79cb-4fe0-b8f3-247fbce0640c-kube-api-access-pbmbg\") pod \"9c28e21c-79cb-4fe0-b8f3-247fbce0640c\" (UID: \"9c28e21c-79cb-4fe0-b8f3-247fbce0640c\") " Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.521761 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.521802 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c28e21c-79cb-4fe0-b8f3-247fbce0640c-secret-volume\") pod \"9c28e21c-79cb-4fe0-b8f3-247fbce0640c\" (UID: \"9c28e21c-79cb-4fe0-b8f3-247fbce0640c\") " Dec 04 15:05:12 crc kubenswrapper[4946]: E1204 15:05:12.523777 4946 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:13.023736397 +0000 UTC m=+163.909780038 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.524464 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c28e21c-79cb-4fe0-b8f3-247fbce0640c-config-volume" (OuterVolumeSpecName: "config-volume") pod "9c28e21c-79cb-4fe0-b8f3-247fbce0640c" (UID: "9c28e21c-79cb-4fe0-b8f3-247fbce0640c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.544319 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c28e21c-79cb-4fe0-b8f3-247fbce0640c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9c28e21c-79cb-4fe0-b8f3-247fbce0640c" (UID: "9c28e21c-79cb-4fe0-b8f3-247fbce0640c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.547008 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c28e21c-79cb-4fe0-b8f3-247fbce0640c-kube-api-access-pbmbg" (OuterVolumeSpecName: "kube-api-access-pbmbg") pod "9c28e21c-79cb-4fe0-b8f3-247fbce0640c" (UID: "9c28e21c-79cb-4fe0-b8f3-247fbce0640c"). InnerVolumeSpecName "kube-api-access-pbmbg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.624246 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.624417 4946 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c28e21c-79cb-4fe0-b8f3-247fbce0640c-config-volume\") on node \"crc\" DevicePath \"\"" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.624432 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbmbg\" (UniqueName: \"kubernetes.io/projected/9c28e21c-79cb-4fe0-b8f3-247fbce0640c-kube-api-access-pbmbg\") on node \"crc\" DevicePath \"\"" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.624444 4946 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c28e21c-79cb-4fe0-b8f3-247fbce0640c-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 04 15:05:12 crc kubenswrapper[4946]: E1204 15:05:12.624819 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:13.124799296 +0000 UTC m=+164.010842937 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.694681 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jvfnf" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.725428 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:12 crc kubenswrapper[4946]: E1204 15:05:12.725862 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:13.225810793 +0000 UTC m=+164.111854434 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.726034 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:12 crc kubenswrapper[4946]: E1204 15:05:12.726768 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:13.226758093 +0000 UTC m=+164.112801734 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.726972 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kr77j"] Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.767938 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nbwkf" event={"ID":"650a1e58-9737-4c8a-b9aa-5529ca970fa6","Type":"ContainerStarted","Data":"3f796c8218a021d9de679d029548271e77e51f05e5bf2472ff01ed8d83397229"} Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.769197 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nbwkf" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.798189 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb" event={"ID":"9c28e21c-79cb-4fe0-b8f3-247fbce0640c","Type":"ContainerDied","Data":"1e3838e41f91698e22c4bc84f3994d4ebb343f1783ad111b6e5993234b5b6903"} Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.798253 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e3838e41f91698e22c4bc84f3994d4ebb343f1783ad111b6e5993234b5b6903" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.798395 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.826018 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nbwkf" podStartSLOduration=142.825995644 podStartE2EDuration="2m22.825995644s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:12.825743205 +0000 UTC m=+163.711786846" watchObservedRunningTime="2025-12-04 15:05:12.825995644 +0000 UTC m=+163.712039285" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.833545 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:12 crc kubenswrapper[4946]: E1204 15:05:12.834397 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:13.334376761 +0000 UTC m=+164.220420402 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.853065 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-v66dj" event={"ID":"447805f5-6492-4c42-95a5-ebfd9af1cf87","Type":"ContainerStarted","Data":"6b01d6c3f1308197f3b59413536e1d2edf004b8a0cba41cdc2ec1fc0ee4741f4"} Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.854147 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-v66dj" Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.923313 4946 generic.go:334] "Generic (PLEG): container finished" podID="9c9bd510-5d62-4814-bd88-62c5a3051f9d" containerID="77f6ade1abff12cea871879f6a6a5b4381b231c95a8d3e57e4340b64d7b13ff8" exitCode=0 Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.931563 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p65s4" event={"ID":"9c9bd510-5d62-4814-bd88-62c5a3051f9d","Type":"ContainerDied","Data":"77f6ade1abff12cea871879f6a6a5b4381b231c95a8d3e57e4340b64d7b13ff8"} Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.931676 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p65s4" event={"ID":"9c9bd510-5d62-4814-bd88-62c5a3051f9d","Type":"ContainerStarted","Data":"041d8374272d456cc7d7f2386ecf4329645af6f3136e5ce70c8802290702f418"} Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.950557 4946 generic.go:334] "Generic (PLEG): container finished" podID="7a98b449-5e32-4f53-8829-fc2d01b603b8" 
containerID="cc55a7e1d0f40151d69d4ff686aa0f4be52cba827fe4064ec2dfb7ddf77119cf" exitCode=0 Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.951420 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrlxj" event={"ID":"7a98b449-5e32-4f53-8829-fc2d01b603b8","Type":"ContainerDied","Data":"cc55a7e1d0f40151d69d4ff686aa0f4be52cba827fe4064ec2dfb7ddf77119cf"} Dec 04 15:05:12 crc kubenswrapper[4946]: I1204 15:05:12.963922 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:12 crc kubenswrapper[4946]: E1204 15:05:12.967282 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:13.467255983 +0000 UTC m=+164.353299624 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.001079 4946 patch_prober.go:28] interesting pod/downloads-7954f5f757-w56q9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body= Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.001186 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-w56q9" podUID="53433a4f-ccda-4c5c-9dca-7389ec6d741c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.001279 4946 patch_prober.go:28] interesting pod/downloads-7954f5f757-w56q9 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body= Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.001313 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-w56q9" podUID="53433a4f-ccda-4c5c-9dca-7389ec6d741c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.003415 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-v66dj" podStartSLOduration=14.003400134 podStartE2EDuration="14.003400134s" podCreationTimestamp="2025-12-04 15:04:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:12.951830611 +0000 UTC m=+163.837874382" 
watchObservedRunningTime="2025-12-04 15:05:13.003400134 +0000 UTC m=+163.889443775" Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.073962 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:13 crc kubenswrapper[4946]: E1204 15:05:13.075792 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:13.575771819 +0000 UTC m=+164.461815460 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.152158 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.159641 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:13 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:13 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:13 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.159714 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.177863 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.178015 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs\") pod \"network-metrics-daemon-9xbtr\" (UID: \"0a3cccbb-17c2-487d-a952-6b5d50656e2a\") " pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:05:13 crc kubenswrapper[4946]: E1204 15:05:13.182616 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:13.682591891 +0000 UTC m=+164.568635532 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.229154 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0a3cccbb-17c2-487d-a952-6b5d50656e2a-metrics-certs\") pod \"network-metrics-daemon-9xbtr\" (UID: \"0a3cccbb-17c2-487d-a952-6b5d50656e2a\") " pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.257889 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m666j"] Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.280904 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:13 crc kubenswrapper[4946]: E1204 15:05:13.282328 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:13.782291556 +0000 UTC m=+164.668335197 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.376436 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9xbtr" Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.382766 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:13 crc kubenswrapper[4946]: E1204 15:05:13.383358 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:13.883342445 +0000 UTC m=+164.769386086 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.396156 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.484583 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:13 crc kubenswrapper[4946]: E1204 15:05:13.484743 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:13.984707313 +0000 UTC m=+164.870750954 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.485022 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:13 crc kubenswrapper[4946]: E1204 15:05:13.485535 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:13.985509109 +0000 UTC m=+164.871552920 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.585972 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:13 crc kubenswrapper[4946]: E1204 15:05:13.586254 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:14.086217676 +0000 UTC m=+164.972261317 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.586350 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:13 crc kubenswrapper[4946]: E1204 15:05:13.586785 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:14.086763303 +0000 UTC m=+164.972806944 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.688470 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:13 crc kubenswrapper[4946]: E1204 15:05:13.688805 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:14.188761312 +0000 UTC m=+165.074804963 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.688875 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:13 crc kubenswrapper[4946]: E1204 15:05:13.689379 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:14.189360751 +0000 UTC m=+165.075404412 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.789682 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:13 crc kubenswrapper[4946]: E1204 15:05:13.789963 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:14.289862792 +0000 UTC m=+165.175906433 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.790142 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:13 crc kubenswrapper[4946]: E1204 15:05:13.790595 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:14.290587975 +0000 UTC m=+165.176631616 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.891226 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:13 crc kubenswrapper[4946]: E1204 15:05:13.891369 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:14.391344944 +0000 UTC m=+165.277388575 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.891935 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:13 crc kubenswrapper[4946]: E1204 15:05:13.892474 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:14.39246323 +0000 UTC m=+165.278506871 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.958789 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kr77j" event={"ID":"74c8940c-1ed3-4aaa-94aa-0623f25f008e","Type":"ContainerStarted","Data":"049ce5875fd860851cfbb3f16e41aaa5a46080273d386b5f20bd2afaa03c1f8a"} Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.993367 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:13 crc kubenswrapper[4946]: E1204 15:05:13.993661 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:14.493622782 +0000 UTC m=+165.379666423 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:13 crc kubenswrapper[4946]: I1204 15:05:13.993748 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:13 crc kubenswrapper[4946]: E1204 15:05:13.994222 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:14.4942116 +0000 UTC m=+165.380255241 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.095791 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:14 crc kubenswrapper[4946]: E1204 15:05:14.096039 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:14.595991702 +0000 UTC m=+165.482035373 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.096177 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:14 crc kubenswrapper[4946]: E1204 15:05:14.096811 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:14.596789748 +0000 UTC m=+165.482833389 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.137888 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:14 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:14 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:14 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.137957 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.162921 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:05:14 crc kubenswrapper[4946]: W1204 15:05:14.164645 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc847e258_e0f2_4129_bcf4_6fc12cd4dfe5.slice/crio-7a18209d2bb4913901f40399553d8cccaaf71349542ad8f9df49a38ea45c6ba7 WatchSource:0}: Error finding container 7a18209d2bb4913901f40399553d8cccaaf71349542ad8f9df49a38ea45c6ba7: Status 404 returned error can't find the container with id 7a18209d2bb4913901f40399553d8cccaaf71349542ad8f9df49a38ea45c6ba7 Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.207839 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:14 crc kubenswrapper[4946]: E1204 15:05:14.208014 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:14.707988719 +0000 UTC m=+165.594032360 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.208479 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:14 crc kubenswrapper[4946]: E1204 15:05:14.209040 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:14.709028752 +0000 UTC m=+165.595072393 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.309536 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:14 crc kubenswrapper[4946]: E1204 15:05:14.311211 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:14.811191166 +0000 UTC m=+165.697234807 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.412093 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:14 crc kubenswrapper[4946]: E1204 15:05:14.412503 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:14.912482862 +0000 UTC m=+165.798526503 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.513504 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:14 crc kubenswrapper[4946]: E1204 15:05:14.513765 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:15.013725967 +0000 UTC m=+165.899769608 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.513820 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:14 crc kubenswrapper[4946]: E1204 15:05:14.514295 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:15.014286975 +0000 UTC m=+165.900330616 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.613304 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-9xbtr"] Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.615622 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:14 crc kubenswrapper[4946]: E1204 15:05:14.616069 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:15.116047736 +0000 UTC m=+166.002091367 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:14 crc kubenswrapper[4946]: W1204 15:05:14.622719 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a3cccbb_17c2_487d_a952_6b5d50656e2a.slice/crio-ad1972e17ab7f6ed046089caf6e77efe0c49b77de9b214a6971f62040e469abd WatchSource:0}: Error finding container ad1972e17ab7f6ed046089caf6e77efe0c49b77de9b214a6971f62040e469abd: Status 404 returned error can't find the container with id ad1972e17ab7f6ed046089caf6e77efe0c49b77de9b214a6971f62040e469abd Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.717949 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:14 crc kubenswrapper[4946]: E1204 15:05:14.718571 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:15.218508789 +0000 UTC m=+166.104552430 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.819222 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:14 crc kubenswrapper[4946]: E1204 15:05:14.819653 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:15.31963335 +0000 UTC m=+166.205676991 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.921138 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:14 crc kubenswrapper[4946]: E1204 15:05:14.921574 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:15.421554536 +0000 UTC m=+166.307598177 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.979713 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" event={"ID":"3c6657f5-af17-443b-882d-3e345029eac5","Type":"ContainerStarted","Data":"6585b7c0fc64425f98b9908fd5d96c0e47ac6d4acf24b7e93bdc8dd099449ac5"} Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.981745 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" event={"ID":"0a3cccbb-17c2-487d-a952-6b5d50656e2a","Type":"ContainerStarted","Data":"ad1972e17ab7f6ed046089caf6e77efe0c49b77de9b214a6971f62040e469abd"} Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.983975 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m666j" event={"ID":"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5","Type":"ContainerStarted","Data":"5efc02c5dfaa11891ddbe4d8b5bb46eae0ef6ebe11b23c8a0d37947e87a7b06a"} Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.984038 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m666j" event={"ID":"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5","Type":"ContainerStarted","Data":"7a18209d2bb4913901f40399553d8cccaaf71349542ad8f9df49a38ea45c6ba7"} Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.988378 4946 generic.go:334] "Generic (PLEG): container finished" podID="74c8940c-1ed3-4aaa-94aa-0623f25f008e" containerID="b7ae1d683095b1730459bcd981d11898ca955a57b47fa8612f41f2163b250e24" exitCode=0 Dec 04 15:05:14 crc kubenswrapper[4946]: I1204 15:05:14.988997 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kr77j" 
event={"ID":"74c8940c-1ed3-4aaa-94aa-0623f25f008e","Type":"ContainerDied","Data":"b7ae1d683095b1730459bcd981d11898ca955a57b47fa8612f41f2163b250e24"} Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.022005 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:15 crc kubenswrapper[4946]: E1204 15:05:15.022206 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:15.522175861 +0000 UTC m=+166.408219502 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:15 crc kubenswrapper[4946]: E1204 15:05:15.023668 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:15.523643317 +0000 UTC m=+166.409686958 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.023668 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.126976 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:15 crc kubenswrapper[4946]: E1204 15:05:15.127504 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:15.627473194 +0000 UTC m=+166.513516835 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.127580 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.128043 4946 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 04 15:05:15 crc kubenswrapper[4946]: E1204 15:05:15.128056 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:15.628038062 +0000 UTC m=+166.514081703 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.136670 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:15 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:15 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:15 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.139620 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.178762 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xwt27" Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.241173 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:15 crc kubenswrapper[4946]: E1204 15:05:15.241477 4946 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:15.741417653 +0000 UTC m=+166.627461294 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.241799 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:15 crc kubenswrapper[4946]: E1204 15:05:15.243636 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:15.743615303 +0000 UTC m=+166.629658944 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.345355 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:15 crc kubenswrapper[4946]: E1204 15:05:15.346456 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:15.846408767 +0000 UTC m=+166.732452428 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.459281 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:15 crc kubenswrapper[4946]: E1204 15:05:15.459698 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:15.959682585 +0000 UTC m=+166.845726226 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.561051 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:15 crc kubenswrapper[4946]: E1204 15:05:15.561326 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:16.061292331 +0000 UTC m=+166.947335982 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.561477 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:15 crc kubenswrapper[4946]: E1204 15:05:15.561886 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:16.06186927 +0000 UTC m=+166.947912911 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.646156 4946 patch_prober.go:28] interesting pod/apiserver-76f77b778f-z8dg2 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 04 15:05:15 crc kubenswrapper[4946]: [+]log ok Dec 04 15:05:15 crc kubenswrapper[4946]: [+]etcd ok Dec 04 15:05:15 crc kubenswrapper[4946]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 04 15:05:15 crc kubenswrapper[4946]: [+]poststarthook/generic-apiserver-start-informers ok Dec 04 15:05:15 crc kubenswrapper[4946]: [+]poststarthook/max-in-flight-filter ok Dec 04 15:05:15 crc kubenswrapper[4946]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 04 15:05:15 crc kubenswrapper[4946]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 04 15:05:15 crc kubenswrapper[4946]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 04 15:05:15 crc kubenswrapper[4946]: [+]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa ok Dec 04 15:05:15 crc kubenswrapper[4946]: [+]poststarthook/project.openshift.io-projectcache ok Dec 04 15:05:15 crc kubenswrapper[4946]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 04 15:05:15 crc kubenswrapper[4946]: [+]poststarthook/openshift.io-startinformers ok Dec 04 15:05:15 crc kubenswrapper[4946]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 04 15:05:15 crc kubenswrapper[4946]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 04 15:05:15 crc kubenswrapper[4946]: livez check failed Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.646222 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" 
podUID="d0437519-c01e-4b89-a007-8fda5902ea9f" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.663013 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:15 crc kubenswrapper[4946]: E1204 15:05:15.663640 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:16.16361674 +0000 UTC m=+167.049660391 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.711535 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 04 15:05:15 crc kubenswrapper[4946]: E1204 15:05:15.712787 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c28e21c-79cb-4fe0-b8f3-247fbce0640c" containerName="collect-profiles" Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.712814 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c28e21c-79cb-4fe0-b8f3-247fbce0640c" containerName="collect-profiles" Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.712977 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c28e21c-79cb-4fe0-b8f3-247fbce0640c" containerName="collect-profiles" Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.713739 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.717773 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.717991 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.726851 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.765807 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:15 crc kubenswrapper[4946]: E1204 15:05:15.766381 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:16.266341981 +0000 UTC m=+167.152385622 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.867647 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:15 crc kubenswrapper[4946]: E1204 15:05:15.867851 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:16.367817593 +0000 UTC m=+167.253861234 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.867898 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b9b9aedb-fb46-4613-a3e7-bd0b702e3772-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b9b9aedb-fb46-4613-a3e7-bd0b702e3772\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.867967 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.867998 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b9b9aedb-fb46-4613-a3e7-bd0b702e3772-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b9b9aedb-fb46-4613-a3e7-bd0b702e3772\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 15:05:15 crc kubenswrapper[4946]: E1204 15:05:15.868542 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:16.368519205 +0000 UTC m=+167.254562886 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.968932 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:15 crc kubenswrapper[4946]: E1204 15:05:15.969101 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:16.469064268 +0000 UTC m=+167.355107939 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.969262 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b9b9aedb-fb46-4613-a3e7-bd0b702e3772-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b9b9aedb-fb46-4613-a3e7-bd0b702e3772\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.969337 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.969382 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b9b9aedb-fb46-4613-a3e7-bd0b702e3772-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b9b9aedb-fb46-4613-a3e7-bd0b702e3772\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 15:05:15 crc kubenswrapper[4946]: I1204 15:05:15.969417 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b9b9aedb-fb46-4613-a3e7-bd0b702e3772-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b9b9aedb-fb46-4613-a3e7-bd0b702e3772\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 15:05:15 crc kubenswrapper[4946]: E1204 15:05:15.969876 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 15:05:16.469848083 +0000 UTC m=+167.355891724 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-56jhv" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.003716 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b9b9aedb-fb46-4613-a3e7-bd0b702e3772-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b9b9aedb-fb46-4613-a3e7-bd0b702e3772\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.039921 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.070938 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.071076 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" event={"ID":"0a3cccbb-17c2-487d-a952-6b5d50656e2a","Type":"ContainerStarted","Data":"2cb154b27a9aef2b6cf7ea0371d1ffa124acacb5e2145bbe36d54a149cac739a"} Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.071148 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-9xbtr" event={"ID":"0a3cccbb-17c2-487d-a952-6b5d50656e2a","Type":"ContainerStarted","Data":"01152898f1f118ca926a86a897a69b5ea98bdc9c99eb70832c032c718128ee8f"} Dec 04 15:05:16 crc kubenswrapper[4946]: E1204 15:05:16.071399 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 15:05:16.571374276 +0000 UTC m=+167.457417917 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.075570 4946 generic.go:334] "Generic (PLEG): container finished" podID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" containerID="5efc02c5dfaa11891ddbe4d8b5bb46eae0ef6ebe11b23c8a0d37947e87a7b06a" exitCode=0 Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.075632 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m666j" event={"ID":"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5","Type":"ContainerDied","Data":"5efc02c5dfaa11891ddbe4d8b5bb46eae0ef6ebe11b23c8a0d37947e87a7b06a"} Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.096846 4946 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-04T15:05:15.128060843Z","Handler":null,"Name":""} Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.098881 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-9xbtr" podStartSLOduration=146.098852921 podStartE2EDuration="2m26.098852921s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:16.093873503 +0000 UTC m=+166.979917144" watchObservedRunningTime="2025-12-04 15:05:16.098852921 +0000 UTC m=+166.984896562" Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.110033 4946 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a 
new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.110077 4946 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.124526 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" event={"ID":"3c6657f5-af17-443b-882d-3e345029eac5","Type":"ContainerStarted","Data":"3c55a91ee3a3ebeced1a5f8fbaf84c9f9ae184984bd44cd6d7ee626ce13f561a"} Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.136671 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:16 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:16 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:16 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.136736 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.172271 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.175958 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.176007 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.249623 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-56jhv\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.274841 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.316532 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.320818 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.653513 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.663676 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d6qcv" Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.828055 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 04 15:05:16 crc kubenswrapper[4946]: I1204 15:05:16.911794 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-56jhv"] Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.140689 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:17 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:17 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:17 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.140757 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.159400 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b9b9aedb-fb46-4613-a3e7-bd0b702e3772","Type":"ContainerStarted","Data":"9ba4dd1892b9f0085530614450acaa130b5eb950f72f8bdae8e622bcdfe4909c"} Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.187061 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" event={"ID":"0ae46332-ca8f-4850-96bc-ca2d408b51d3","Type":"ContainerStarted","Data":"1b66f669918db115e91c3af26b1c66b1830a8b03f57d312d183cb01ed4c13950"} Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.201802 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" event={"ID":"3c6657f5-af17-443b-882d-3e345029eac5","Type":"ContainerStarted","Data":"8e0ec0c0063e55b622e6a8b98346657252408facbef68e6b504f7b28e8870aa5"} Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.225741 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-wm6jt" podStartSLOduration=18.225711801 podStartE2EDuration="18.225711801s" podCreationTimestamp="2025-12-04 15:04:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:17.222171378 +0000 UTC m=+168.108215019" watchObservedRunningTime="2025-12-04 15:05:17.225711801 +0000 UTC m=+168.111755442" Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.448728 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.450802 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.474044 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.474634 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.499009 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f722f20d-9a72-47d8-bec8-81d1dbc17a42-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f722f20d-9a72-47d8-bec8-81d1dbc17a42\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.499077 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f722f20d-9a72-47d8-bec8-81d1dbc17a42-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f722f20d-9a72-47d8-bec8-81d1dbc17a42\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.503659 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.504742 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.600367 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f722f20d-9a72-47d8-bec8-81d1dbc17a42-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f722f20d-9a72-47d8-bec8-81d1dbc17a42\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.600426 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f722f20d-9a72-47d8-bec8-81d1dbc17a42-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f722f20d-9a72-47d8-bec8-81d1dbc17a42\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.600789 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f722f20d-9a72-47d8-bec8-81d1dbc17a42-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f722f20d-9a72-47d8-bec8-81d1dbc17a42\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.641083 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f722f20d-9a72-47d8-bec8-81d1dbc17a42-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f722f20d-9a72-47d8-bec8-81d1dbc17a42\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 15:05:17 crc kubenswrapper[4946]: I1204 15:05:17.805153 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 15:05:18 crc kubenswrapper[4946]: I1204 15:05:18.137318 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:18 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:18 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:18 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:18 crc kubenswrapper[4946]: I1204 15:05:18.138003 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:18 crc kubenswrapper[4946]: I1204 15:05:18.206864 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-v66dj" Dec 04 15:05:18 crc kubenswrapper[4946]: I1204 15:05:18.278891 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" event={"ID":"0ae46332-ca8f-4850-96bc-ca2d408b51d3","Type":"ContainerStarted","Data":"ca2c8118ade39561437598d00aff215f2505875cbb16d9ab8b23365263ecb18d"} Dec 04 15:05:18 crc kubenswrapper[4946]: I1204 15:05:18.279373 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" Dec 04 15:05:18 crc kubenswrapper[4946]: I1204 15:05:18.323301 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" podStartSLOduration=148.323274408 podStartE2EDuration="2m28.323274408s" podCreationTimestamp="2025-12-04 15:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:18.317661729 +0000 UTC m=+169.203705370" watchObservedRunningTime="2025-12-04 15:05:18.323274408 +0000 UTC m=+169.209318049" Dec 04 15:05:18 crc kubenswrapper[4946]: I1204 15:05:18.336377 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 04 15:05:19 crc kubenswrapper[4946]: I1204 15:05:19.139841 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:19 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:19 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:19 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:19 crc kubenswrapper[4946]: I1204 15:05:19.140512 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:19 crc kubenswrapper[4946]: I1204 15:05:19.341454 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f722f20d-9a72-47d8-bec8-81d1dbc17a42","Type":"ContainerStarted","Data":"1c305b9cf20ab1d99146421f17130a0344d107836dd4455603d7770605037d40"} Dec 04 15:05:19 crc 
kubenswrapper[4946]: I1204 15:05:19.345823 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b9b9aedb-fb46-4613-a3e7-bd0b702e3772","Type":"ContainerStarted","Data":"f1fa2e349c83e98e7e5517d41334c66b8dd062818bb4eb78f0450903e9fc7066"} Dec 04 15:05:19 crc kubenswrapper[4946]: I1204 15:05:19.369019 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=4.368999313 podStartE2EDuration="4.368999313s" podCreationTimestamp="2025-12-04 15:05:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:19.36481445 +0000 UTC m=+170.250858091" watchObservedRunningTime="2025-12-04 15:05:19.368999313 +0000 UTC m=+170.255042954" Dec 04 15:05:20 crc kubenswrapper[4946]: I1204 15:05:20.155172 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:20 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:20 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:20 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:20 crc kubenswrapper[4946]: I1204 15:05:20.155641 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:20 crc kubenswrapper[4946]: I1204 15:05:20.428451 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f722f20d-9a72-47d8-bec8-81d1dbc17a42","Type":"ContainerStarted","Data":"3b0ccb92a4f55abfac386ba9eb2f0407d27c1576cbe02b8801b6625c860825a4"} Dec 04 15:05:20 crc kubenswrapper[4946]: I1204 15:05:20.433326 4946 generic.go:334] "Generic (PLEG): container finished" podID="b9b9aedb-fb46-4613-a3e7-bd0b702e3772" containerID="f1fa2e349c83e98e7e5517d41334c66b8dd062818bb4eb78f0450903e9fc7066" exitCode=0 Dec 04 15:05:20 crc kubenswrapper[4946]: I1204 15:05:20.433381 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b9b9aedb-fb46-4613-a3e7-bd0b702e3772","Type":"ContainerDied","Data":"f1fa2e349c83e98e7e5517d41334c66b8dd062818bb4eb78f0450903e9fc7066"} Dec 04 15:05:20 crc kubenswrapper[4946]: I1204 15:05:20.625276 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:05:20 crc kubenswrapper[4946]: I1204 15:05:20.631431 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-z8dg2" Dec 04 15:05:21 crc kubenswrapper[4946]: I1204 15:05:21.135817 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:21 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:21 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:21 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:21 crc 
kubenswrapper[4946]: I1204 15:05:21.135878 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:21 crc kubenswrapper[4946]: I1204 15:05:21.542890 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=4.54286595 podStartE2EDuration="4.54286595s" podCreationTimestamp="2025-12-04 15:05:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:05:21.541239968 +0000 UTC m=+172.427283609" watchObservedRunningTime="2025-12-04 15:05:21.54286595 +0000 UTC m=+172.428909591" Dec 04 15:05:21 crc kubenswrapper[4946]: I1204 15:05:21.745703 4946 patch_prober.go:28] interesting pod/console-f9d7485db-v4qw8 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body= Dec 04 15:05:21 crc kubenswrapper[4946]: I1204 15:05:21.745779 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-v4qw8" podUID="12dc3c7b-da6c-46a0-b0c9-d0899e46837a" containerName="console" probeResult="failure" output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused" Dec 04 15:05:22 crc kubenswrapper[4946]: I1204 15:05:22.143390 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:22 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:22 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:22 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:22 crc kubenswrapper[4946]: I1204 15:05:22.143907 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:22 crc kubenswrapper[4946]: I1204 15:05:22.206432 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 15:05:22 crc kubenswrapper[4946]: I1204 15:05:22.330770 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b9b9aedb-fb46-4613-a3e7-bd0b702e3772-kube-api-access\") pod \"b9b9aedb-fb46-4613-a3e7-bd0b702e3772\" (UID: \"b9b9aedb-fb46-4613-a3e7-bd0b702e3772\") " Dec 04 15:05:22 crc kubenswrapper[4946]: I1204 15:05:22.330848 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b9b9aedb-fb46-4613-a3e7-bd0b702e3772-kubelet-dir\") pod \"b9b9aedb-fb46-4613-a3e7-bd0b702e3772\" (UID: \"b9b9aedb-fb46-4613-a3e7-bd0b702e3772\") " Dec 04 15:05:22 crc kubenswrapper[4946]: I1204 15:05:22.330926 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b9b9aedb-fb46-4613-a3e7-bd0b702e3772-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "b9b9aedb-fb46-4613-a3e7-bd0b702e3772" (UID: "b9b9aedb-fb46-4613-a3e7-bd0b702e3772"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 15:05:22 crc kubenswrapper[4946]: I1204 15:05:22.331482 4946 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b9b9aedb-fb46-4613-a3e7-bd0b702e3772-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 04 15:05:22 crc kubenswrapper[4946]: I1204 15:05:22.337655 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9b9aedb-fb46-4613-a3e7-bd0b702e3772-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "b9b9aedb-fb46-4613-a3e7-bd0b702e3772" (UID: "b9b9aedb-fb46-4613-a3e7-bd0b702e3772"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:05:22 crc kubenswrapper[4946]: I1204 15:05:22.434998 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b9b9aedb-fb46-4613-a3e7-bd0b702e3772-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 15:05:22 crc kubenswrapper[4946]: I1204 15:05:22.480489 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:05:22 crc kubenswrapper[4946]: I1204 15:05:22.480597 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:05:22 crc kubenswrapper[4946]: I1204 15:05:22.561391 4946 generic.go:334] "Generic (PLEG): container finished" podID="f722f20d-9a72-47d8-bec8-81d1dbc17a42" containerID="3b0ccb92a4f55abfac386ba9eb2f0407d27c1576cbe02b8801b6625c860825a4" exitCode=0 Dec 04 15:05:22 crc kubenswrapper[4946]: I1204 15:05:22.561478 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f722f20d-9a72-47d8-bec8-81d1dbc17a42","Type":"ContainerDied","Data":"3b0ccb92a4f55abfac386ba9eb2f0407d27c1576cbe02b8801b6625c860825a4"} Dec 04 15:05:22 crc kubenswrapper[4946]: I1204 15:05:22.565636 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b9b9aedb-fb46-4613-a3e7-bd0b702e3772","Type":"ContainerDied","Data":"9ba4dd1892b9f0085530614450acaa130b5eb950f72f8bdae8e622bcdfe4909c"} Dec 04 15:05:22 crc kubenswrapper[4946]: I1204 15:05:22.565690 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ba4dd1892b9f0085530614450acaa130b5eb950f72f8bdae8e622bcdfe4909c" Dec 04 15:05:22 crc kubenswrapper[4946]: I1204 15:05:22.565754 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 15:05:23 crc kubenswrapper[4946]: I1204 15:05:23.000937 4946 patch_prober.go:28] interesting pod/downloads-7954f5f757-w56q9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body= Dec 04 15:05:23 crc kubenswrapper[4946]: I1204 15:05:23.000936 4946 patch_prober.go:28] interesting pod/downloads-7954f5f757-w56q9 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body= Dec 04 15:05:23 crc kubenswrapper[4946]: I1204 15:05:23.001021 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-w56q9" podUID="53433a4f-ccda-4c5c-9dca-7389ec6d741c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" Dec 04 15:05:23 crc kubenswrapper[4946]: I1204 15:05:23.001086 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-w56q9" podUID="53433a4f-ccda-4c5c-9dca-7389ec6d741c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" Dec 04 15:05:23 crc kubenswrapper[4946]: I1204 15:05:23.145737 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:23 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:23 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:23 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:23 crc kubenswrapper[4946]: I1204 15:05:23.145847 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:24 crc kubenswrapper[4946]: I1204 15:05:24.136782 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:24 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:24 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:24 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:24 crc kubenswrapper[4946]: I1204 15:05:24.137409 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:25 crc kubenswrapper[4946]: I1204 15:05:25.135860 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:25 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:25 crc 
kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:25 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:25 crc kubenswrapper[4946]: I1204 15:05:25.135960 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:26 crc kubenswrapper[4946]: I1204 15:05:26.138967 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:26 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:26 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:26 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:26 crc kubenswrapper[4946]: I1204 15:05:26.139098 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:27 crc kubenswrapper[4946]: I1204 15:05:27.136783 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:27 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:27 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:27 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:27 crc kubenswrapper[4946]: I1204 15:05:27.136875 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:27 crc kubenswrapper[4946]: I1204 15:05:27.526098 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 15:05:28 crc kubenswrapper[4946]: I1204 15:05:28.136619 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:28 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:28 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:28 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:28 crc kubenswrapper[4946]: I1204 15:05:28.137049 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:29 crc kubenswrapper[4946]: I1204 15:05:29.135960 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:29 crc kubenswrapper[4946]: [-]has-synced failed: reason withheld Dec 04 15:05:29 crc kubenswrapper[4946]: 
[+]process-running ok Dec 04 15:05:29 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:29 crc kubenswrapper[4946]: I1204 15:05:29.136143 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:30 crc kubenswrapper[4946]: I1204 15:05:30.144201 4946 patch_prober.go:28] interesting pod/router-default-5444994796-zzdp9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 15:05:30 crc kubenswrapper[4946]: [+]has-synced ok Dec 04 15:05:30 crc kubenswrapper[4946]: [+]process-running ok Dec 04 15:05:30 crc kubenswrapper[4946]: healthz check failed Dec 04 15:05:30 crc kubenswrapper[4946]: I1204 15:05:30.144727 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zzdp9" podUID="488929dd-9d70-4b9f-b41b-40be79becc36" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:05:31 crc kubenswrapper[4946]: I1204 15:05:31.136605 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:31 crc kubenswrapper[4946]: I1204 15:05:31.140510 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-zzdp9" Dec 04 15:05:31 crc kubenswrapper[4946]: I1204 15:05:31.744626 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:05:31 crc kubenswrapper[4946]: I1204 15:05:31.750068 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:05:32 crc kubenswrapper[4946]: I1204 15:05:32.587441 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 04 15:05:32 crc kubenswrapper[4946]: I1204 15:05:32.680189 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f722f20d-9a72-47d8-bec8-81d1dbc17a42","Type":"ContainerDied","Data":"1c305b9cf20ab1d99146421f17130a0344d107836dd4455603d7770605037d40"} Dec 04 15:05:32 crc kubenswrapper[4946]: I1204 15:05:32.680236 4946 util.go:48] "No ready sandbox for pod can be found. 
Dec 04 15:05:32 crc kubenswrapper[4946]: I1204 15:05:32.680236 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 04 15:05:32 crc kubenswrapper[4946]: I1204 15:05:32.680261 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c305b9cf20ab1d99146421f17130a0344d107836dd4455603d7770605037d40"
Dec 04 15:05:32 crc kubenswrapper[4946]: I1204 15:05:32.690404 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f722f20d-9a72-47d8-bec8-81d1dbc17a42-kube-api-access\") pod \"f722f20d-9a72-47d8-bec8-81d1dbc17a42\" (UID: \"f722f20d-9a72-47d8-bec8-81d1dbc17a42\") "
Dec 04 15:05:32 crc kubenswrapper[4946]: I1204 15:05:32.690504 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f722f20d-9a72-47d8-bec8-81d1dbc17a42-kubelet-dir\") pod \"f722f20d-9a72-47d8-bec8-81d1dbc17a42\" (UID: \"f722f20d-9a72-47d8-bec8-81d1dbc17a42\") "
Dec 04 15:05:32 crc kubenswrapper[4946]: I1204 15:05:32.690604 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f722f20d-9a72-47d8-bec8-81d1dbc17a42-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f722f20d-9a72-47d8-bec8-81d1dbc17a42" (UID: "f722f20d-9a72-47d8-bec8-81d1dbc17a42"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 15:05:32 crc kubenswrapper[4946]: I1204 15:05:32.691267 4946 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f722f20d-9a72-47d8-bec8-81d1dbc17a42-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 04 15:05:32 crc kubenswrapper[4946]: I1204 15:05:32.722540 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f722f20d-9a72-47d8-bec8-81d1dbc17a42-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f722f20d-9a72-47d8-bec8-81d1dbc17a42" (UID: "f722f20d-9a72-47d8-bec8-81d1dbc17a42"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:05:32 crc kubenswrapper[4946]: I1204 15:05:32.792883 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f722f20d-9a72-47d8-bec8-81d1dbc17a42-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 04 15:05:33 crc kubenswrapper[4946]: I1204 15:05:33.000463 4946 patch_prober.go:28] interesting pod/downloads-7954f5f757-w56q9 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body=
Dec 04 15:05:33 crc kubenswrapper[4946]: I1204 15:05:33.000530 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-w56q9" podUID="53433a4f-ccda-4c5c-9dca-7389ec6d741c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused"
Dec 04 15:05:33 crc kubenswrapper[4946]: I1204 15:05:33.000581 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-w56q9"
Dec 04 15:05:33 crc kubenswrapper[4946]: I1204 15:05:33.000805 4946 patch_prober.go:28] interesting pod/downloads-7954f5f757-w56q9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body=
Dec 04 15:05:33 crc kubenswrapper[4946]: I1204 15:05:33.000911 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-w56q9" podUID="53433a4f-ccda-4c5c-9dca-7389ec6d741c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused"
Dec 04 15:05:33 crc kubenswrapper[4946]: I1204 15:05:33.001173 4946 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"54ff70e6da225563ec691307f622feb5cd40f5a667d564da99cd01a5fa38aa2a"} pod="openshift-console/downloads-7954f5f757-w56q9" containerMessage="Container download-server failed liveness probe, will be restarted"
Dec 04 15:05:33 crc kubenswrapper[4946]: I1204 15:05:33.001305 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-w56q9" podUID="53433a4f-ccda-4c5c-9dca-7389ec6d741c" containerName="download-server" containerID="cri-o://54ff70e6da225563ec691307f622feb5cd40f5a667d564da99cd01a5fa38aa2a" gracePeriod=2
Dec 04 15:05:33 crc kubenswrapper[4946]: I1204 15:05:33.001427 4946 patch_prober.go:28] interesting pod/downloads-7954f5f757-w56q9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body=
Dec 04 15:05:33 crc kubenswrapper[4946]: I1204 15:05:33.001457 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-w56q9" podUID="53433a4f-ccda-4c5c-9dca-7389ec6d741c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused"
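Editor's note: the download-server liveness failure above ends in "Killing container with a grace period ... gracePeriod=2": the runtime sends SIGTERM, waits up to the grace period, then force-kills. A stdlib Go sketch of that TERM-then-KILL pattern, not CRI-O's actual stop path; the process and grace value are stand-ins (Unix-only signals).

```go
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// killWithGrace mirrors the kubelet/runtime pattern: SIGTERM first,
// SIGKILL once the grace period expires.
func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
	_ = cmd.Process.Signal(syscall.SIGTERM)
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case <-done:
		fmt.Println("container exited within grace period")
	case <-time.After(grace):
		_ = cmd.Process.Kill() // SIGKILL
		<-done
		fmt.Println("grace period expired; force-killed")
	}
}

func main() {
	// "sleep" stands in for the container's main process; it dies on
	// SIGTERM, so this run finishes within the grace period.
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	killWithGrace(cmd, 2*time.Second) // gracePeriod=2, as in the log
}
```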
Dec 04 15:05:33 crc kubenswrapper[4946]: I1204 15:05:33.694273 4946 generic.go:334] "Generic (PLEG): container finished" podID="53433a4f-ccda-4c5c-9dca-7389ec6d741c" containerID="54ff70e6da225563ec691307f622feb5cd40f5a667d564da99cd01a5fa38aa2a" exitCode=0
Dec 04 15:05:33 crc kubenswrapper[4946]: I1204 15:05:33.694334 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-w56q9" event={"ID":"53433a4f-ccda-4c5c-9dca-7389ec6d741c","Type":"ContainerDied","Data":"54ff70e6da225563ec691307f622feb5cd40f5a667d564da99cd01a5fa38aa2a"}
Dec 04 15:05:36 crc kubenswrapper[4946]: I1204 15:05:36.327510 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-56jhv"
Dec 04 15:05:42 crc kubenswrapper[4946]: I1204 15:05:42.811699 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nbwkf"
Dec 04 15:05:43 crc kubenswrapper[4946]: I1204 15:05:43.001948 4946 patch_prober.go:28] interesting pod/downloads-7954f5f757-w56q9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body=
Dec 04 15:05:43 crc kubenswrapper[4946]: I1204 15:05:43.002438 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-w56q9" podUID="53433a4f-ccda-4c5c-9dca-7389ec6d741c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused"
Dec 04 15:05:52 crc kubenswrapper[4946]: I1204 15:05:52.478863 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 15:05:52 crc kubenswrapper[4946]: I1204 15:05:52.479287 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 15:05:52 crc kubenswrapper[4946]: I1204 15:05:52.479341 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-qhv79"
Dec 04 15:05:52 crc kubenswrapper[4946]: I1204 15:05:52.480005 4946 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13"} pod="openshift-machine-config-operator/machine-config-daemon-qhv79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 04 15:05:52 crc kubenswrapper[4946]: I1204 15:05:52.480068 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" containerID="cri-o://615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13" gracePeriod=600
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.001351 4946 patch_prober.go:28] interesting pod/downloads-7954f5f757-w56q9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body=
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.001412 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-w56q9" podUID="53433a4f-ccda-4c5c-9dca-7389ec6d741c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused"
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.034304 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 04 15:05:53 crc kubenswrapper[4946]: E1204 15:05:53.034599 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9b9aedb-fb46-4613-a3e7-bd0b702e3772" containerName="pruner"
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.034615 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9b9aedb-fb46-4613-a3e7-bd0b702e3772" containerName="pruner"
Dec 04 15:05:53 crc kubenswrapper[4946]: E1204 15:05:53.034627 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f722f20d-9a72-47d8-bec8-81d1dbc17a42" containerName="pruner"
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.034646 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f722f20d-9a72-47d8-bec8-81d1dbc17a42" containerName="pruner"
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.034800 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f722f20d-9a72-47d8-bec8-81d1dbc17a42" containerName="pruner"
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.034815 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9b9aedb-fb46-4613-a3e7-bd0b702e3772" containerName="pruner"
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.035342 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.037669 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.042337 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.044383 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.113637 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ef91c060-23b3-4aa6-9060-901db5169a46-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ef91c060-23b3-4aa6-9060-901db5169a46\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.113701 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ef91c060-23b3-4aa6-9060-901db5169a46-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ef91c060-23b3-4aa6-9060-901db5169a46\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.215323 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ef91c060-23b3-4aa6-9060-901db5169a46-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ef91c060-23b3-4aa6-9060-901db5169a46\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.215424 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ef91c060-23b3-4aa6-9060-901db5169a46-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ef91c060-23b3-4aa6-9060-901db5169a46\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.215492 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ef91c060-23b3-4aa6-9060-901db5169a46-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ef91c060-23b3-4aa6-9060-901db5169a46\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.244039 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ef91c060-23b3-4aa6-9060-901db5169a46-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ef91c060-23b3-4aa6-9060-901db5169a46\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
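Editor's note: the reconciler_common entries above are the kubelet volume manager reconciling desired state against actual state: volumes declared by revision-pruner-9-crc but not yet mounted get MountVolume operations, and volumes still mounted for deleted pods get UnmountVolume operations. A toy Go model of that diff, under the assumption that the real reconciler is far richer (attach/detach, device paths, per-plugin logic); names here mirror the log, nothing more.

```go
package main

import "fmt"

// reconcile compares desired vs actually-mounted volumes and emits the
// operation kinds the kubelet logs as "MountVolume started" /
// "UnmountVolume started".
func reconcile(desired, actual map[string]bool) {
	for v := range desired {
		if !actual[v] {
			fmt.Printf("operationExecutor.MountVolume started for volume %q\n", v)
		}
	}
	for v := range actual {
		if !desired[v] {
			fmt.Printf("operationExecutor.UnmountVolume started for volume %q\n", v)
		}
	}
}

func main() {
	// revision-pruner-9-crc declares these two volumes in the log above;
	// nothing is mounted yet for the new sandbox.
	desired := map[string]bool{"kube-api-access": true, "kubelet-dir": true}
	actual := map[string]bool{}
	reconcile(desired, actual)
}
```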
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.358712 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.824321 4946 generic.go:334] "Generic (PLEG): container finished" podID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerID="615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13" exitCode=0
Dec 04 15:05:53 crc kubenswrapper[4946]: I1204 15:05:53.824406 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerDied","Data":"615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13"}
Dec 04 15:05:54 crc kubenswrapper[4946]: E1204 15:05:54.286546 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Dec 04 15:05:54 crc kubenswrapper[4946]: E1204 15:05:54.286781 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kfh42,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-fqqvb_openshift-marketplace(ca7d43a3-9406-4810-9105-ede64b23375e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 04 15:05:54 crc kubenswrapper[4946]: E1204 15:05:54.288004 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-fqqvb" podUID="ca7d43a3-9406-4810-9105-ede64b23375e"
Dec 04 15:05:54 crc kubenswrapper[4946]: E1204 15:05:54.602404 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Dec 04 15:05:54 crc kubenswrapper[4946]: E1204 15:05:54.603283 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rcdkj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-df2mv_openshift-marketplace(fec9d9bd-a20b-4625-9070-19949999c206): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 04 15:05:54 crc kubenswrapper[4946]: E1204 15:05:54.604558 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-df2mv" podUID="fec9d9bd-a20b-4625-9070-19949999c206"
Dec 04 15:05:57 crc kubenswrapper[4946]: E1204 15:05:57.246638 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-fqqvb" podUID="ca7d43a3-9406-4810-9105-ede64b23375e"
Dec 04 15:05:57 crc kubenswrapper[4946]: E1204 15:05:57.246664 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-df2mv" podUID="fec9d9bd-a20b-4625-9070-19949999c206"
Dec 04 15:05:57 crc kubenswrapper[4946]: E1204 15:05:57.341284 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Dec 04 15:05:57 crc kubenswrapper[4946]: E1204 15:05:57.341488 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rd86j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-kr77j_openshift-marketplace(74c8940c-1ed3-4aaa-94aa-0623f25f008e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 04 15:05:57 crc kubenswrapper[4946]: E1204 15:05:57.342961 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-kr77j" podUID="74c8940c-1ed3-4aaa-94aa-0623f25f008e"
Dec 04 15:05:57 crc kubenswrapper[4946]: I1204 15:05:57.835848 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
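Editor's note: the ErrImagePull entries above are followed at 15:05:57 by ImagePullBackOff "Back-off pulling image" errors. Between consecutive pull failures the kubelet waits an exponentially growing delay; an initial 10s delay doubling to a 5m cap matches kubelet's usual defaults, but treat those constants as assumptions here, not guarantees. A small Go sketch of that backoff schedule:

```go
package main

import (
	"fmt"
	"time"
)

// backoffDelays models ImagePullBackOff's exponential backoff schedule.
// The 10s initial delay and 5m cap are assumed defaults, not read from
// this cluster's configuration.
func backoffDelays(failures int) []time.Duration {
	const (
		initial = 10 * time.Second
		cap     = 5 * time.Minute
	)
	delays := make([]time.Duration, 0, failures)
	d := initial
	for i := 0; i < failures; i++ {
		delays = append(delays, d)
		d *= 2
		if d > cap {
			d = cap
		}
	}
	return delays
}

func main() {
	// After each ErrImagePull, the next retry waits longer:
	for i, d := range backoffDelays(6) {
		fmt.Printf("failure %d -> back off %v before next pull\n", i+1, d)
	}
}
```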
Dec 04 15:05:57 crc kubenswrapper[4946]: I1204 15:05:57.836757 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 15:05:57 crc kubenswrapper[4946]: I1204 15:05:57.846754 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 04 15:05:57 crc kubenswrapper[4946]: I1204 15:05:57.886760 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2ba0b499-56a8-4e62-93cc-c2f2f1cad117-kubelet-dir\") pod \"installer-9-crc\" (UID: \"2ba0b499-56a8-4e62-93cc-c2f2f1cad117\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 15:05:57 crc kubenswrapper[4946]: I1204 15:05:57.886825 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2ba0b499-56a8-4e62-93cc-c2f2f1cad117-kube-api-access\") pod \"installer-9-crc\" (UID: \"2ba0b499-56a8-4e62-93cc-c2f2f1cad117\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 15:05:57 crc kubenswrapper[4946]: I1204 15:05:57.886943 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2ba0b499-56a8-4e62-93cc-c2f2f1cad117-var-lock\") pod \"installer-9-crc\" (UID: \"2ba0b499-56a8-4e62-93cc-c2f2f1cad117\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 15:05:57 crc kubenswrapper[4946]: I1204 15:05:57.990835 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2ba0b499-56a8-4e62-93cc-c2f2f1cad117-kubelet-dir\") pod \"installer-9-crc\" (UID: \"2ba0b499-56a8-4e62-93cc-c2f2f1cad117\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 15:05:57 crc kubenswrapper[4946]: I1204 15:05:57.990914 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2ba0b499-56a8-4e62-93cc-c2f2f1cad117-kube-api-access\") pod \"installer-9-crc\" (UID: \"2ba0b499-56a8-4e62-93cc-c2f2f1cad117\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 15:05:57 crc kubenswrapper[4946]: I1204 15:05:57.990963 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2ba0b499-56a8-4e62-93cc-c2f2f1cad117-var-lock\") pod \"installer-9-crc\" (UID: \"2ba0b499-56a8-4e62-93cc-c2f2f1cad117\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 15:05:57 crc kubenswrapper[4946]: I1204 15:05:57.991032 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2ba0b499-56a8-4e62-93cc-c2f2f1cad117-kubelet-dir\") pod \"installer-9-crc\" (UID: \"2ba0b499-56a8-4e62-93cc-c2f2f1cad117\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 15:05:57 crc kubenswrapper[4946]: I1204 15:05:57.991089 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2ba0b499-56a8-4e62-93cc-c2f2f1cad117-var-lock\") pod \"installer-9-crc\" (UID: \"2ba0b499-56a8-4e62-93cc-c2f2f1cad117\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 15:05:58 crc kubenswrapper[4946]: I1204 15:05:58.014322 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2ba0b499-56a8-4e62-93cc-c2f2f1cad117-kube-api-access\") pod \"installer-9-crc\" (UID: \"2ba0b499-56a8-4e62-93cc-c2f2f1cad117\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 15:05:58 crc kubenswrapper[4946]: I1204 15:05:58.172635 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 15:05:58 crc kubenswrapper[4946]: E1204 15:05:58.432225 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-kr77j" podUID="74c8940c-1ed3-4aaa-94aa-0623f25f008e"
Dec 04 15:05:58 crc kubenswrapper[4946]: E1204 15:05:58.519780 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 04 15:05:58 crc kubenswrapper[4946]: E1204 15:05:58.520035 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cxtzx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-vrlxj_openshift-marketplace(7a98b449-5e32-4f53-8829-fc2d01b603b8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 04 15:05:58 crc kubenswrapper[4946]: E1204 15:05:58.521283 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-vrlxj" podUID="7a98b449-5e32-4f53-8829-fc2d01b603b8"
Dec 04 15:06:00 crc kubenswrapper[4946]: E1204 15:06:00.103538 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-vrlxj" podUID="7a98b449-5e32-4f53-8829-fc2d01b603b8"
Dec 04 15:06:00 crc kubenswrapper[4946]: E1204 15:06:00.207197 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 04 15:06:00 crc kubenswrapper[4946]: E1204 15:06:00.207906 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rkx6z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-p65s4_openshift-marketplace(9c9bd510-5d62-4814-bd88-62c5a3051f9d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 04 15:06:00 crc kubenswrapper[4946]: E1204 15:06:00.211473 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-p65s4" podUID="9c9bd510-5d62-4814-bd88-62c5a3051f9d"
Dec 04 15:06:00 crc kubenswrapper[4946]: E1204 15:06:00.223576 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Dec 04 15:06:00 crc kubenswrapper[4946]: E1204 15:06:00.223877 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-m8gdn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-7mzks_openshift-marketplace(73b58e95-46d5-468b-9890-a4fc3c5a0bde): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 04 15:06:00 crc kubenswrapper[4946]: E1204 15:06:00.225055 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Dec 04 15:06:00 crc kubenswrapper[4946]: E1204 15:06:00.225171 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-7mzks" podUID="73b58e95-46d5-468b-9890-a4fc3c5a0bde"
Dec 04 15:06:00 crc kubenswrapper[4946]: E1204 15:06:00.225299 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t5jk2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-8mgfv_openshift-marketplace(8c9f7504-90bf-4e33-be97-43f5d81896ae): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 04 15:06:00 crc kubenswrapper[4946]: E1204 15:06:00.228067 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-8mgfv" podUID="8c9f7504-90bf-4e33-be97-43f5d81896ae"
Dec 04 15:06:00 crc kubenswrapper[4946]: E1204 15:06:00.257370 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Dec 04 15:06:00 crc kubenswrapper[4946]: E1204 15:06:00.257551 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-46l8s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-m666j_openshift-marketplace(c847e258-e0f2-4129-bcf4-6fc12cd4dfe5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 04 15:06:00 crc kubenswrapper[4946]: E1204 15:06:00.258906 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-m666j" podUID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5"
Dec 04 15:06:00 crc kubenswrapper[4946]: I1204 15:06:00.645189 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 04 15:06:00 crc kubenswrapper[4946]: I1204 15:06:00.701523 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 04 15:06:00 crc kubenswrapper[4946]: I1204 15:06:00.885345 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2ba0b499-56a8-4e62-93cc-c2f2f1cad117","Type":"ContainerStarted","Data":"fc8f7ca290516d6701e8f16d318288e6d672f516ae3df6920cc2ae9845889079"}
Dec 04 15:06:00 crc kubenswrapper[4946]: I1204 15:06:00.900650 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"34fd78eefbc4faaf18027485e3a960a582716ccf43fd9f02cdf83bf0a757e5e8"}
Dec 04 15:06:00 crc kubenswrapper[4946]: I1204 15:06:00.904961 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-w56q9" event={"ID":"53433a4f-ccda-4c5c-9dca-7389ec6d741c","Type":"ContainerStarted","Data":"805a45ab3bd4368f3f256b76ae360c0d24ff2925196e37ae2d39bf04637c36fa"}
Dec 04 15:06:00 crc kubenswrapper[4946]: I1204 15:06:00.905905 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-w56q9"
Dec 04 15:06:00 crc kubenswrapper[4946]: I1204 15:06:00.906749 4946 patch_prober.go:28] interesting pod/downloads-7954f5f757-w56q9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body=
Dec 04 15:06:00 crc kubenswrapper[4946]: I1204 15:06:00.906792 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-w56q9" podUID="53433a4f-ccda-4c5c-9dca-7389ec6d741c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused"
Dec 04 15:06:00 crc kubenswrapper[4946]: I1204 15:06:00.913355 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"ef91c060-23b3-4aa6-9060-901db5169a46","Type":"ContainerStarted","Data":"8f21a66fe2398d6a1c200b53afdd78e6be177f41f9e9476f92ce5f9606d1ac57"}
Dec 04 15:06:00 crc kubenswrapper[4946]: E1204 15:06:00.916605 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-p65s4" podUID="9c9bd510-5d62-4814-bd88-62c5a3051f9d"
Dec 04 15:06:00 crc kubenswrapper[4946]: E1204 15:06:00.916782 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-m666j" podUID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5"
Dec 04 15:06:00 crc kubenswrapper[4946]: E1204 15:06:00.917598 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-8mgfv" podUID="8c9f7504-90bf-4e33-be97-43f5d81896ae"
Dec 04 15:06:00 crc kubenswrapper[4946]: E1204 15:06:00.918002 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-7mzks" podUID="73b58e95-46d5-468b-9890-a4fc3c5a0bde"
Dec 04 15:06:01 crc kubenswrapper[4946]: I1204 15:06:01.919590 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2ba0b499-56a8-4e62-93cc-c2f2f1cad117","Type":"ContainerStarted","Data":"d205aefda6c8f7fa376bca235ccc7f6207a8f8cc183f29b35153baf2e39537b5"}
Dec 04 15:06:01 crc kubenswrapper[4946]: I1204 15:06:01.922467 4946 generic.go:334] "Generic (PLEG): container finished" podID="ef91c060-23b3-4aa6-9060-901db5169a46" containerID="c6fac4c9874a189fe41154e8f428661e9e8f8dd44adb229fed89219ad036b4d7" exitCode=0
Dec 04 15:06:01 crc kubenswrapper[4946]: I1204 15:06:01.922557 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"ef91c060-23b3-4aa6-9060-901db5169a46","Type":"ContainerDied","Data":"c6fac4c9874a189fe41154e8f428661e9e8f8dd44adb229fed89219ad036b4d7"}
Dec 04 15:06:01 crc kubenswrapper[4946]: I1204 15:06:01.923419 4946 patch_prober.go:28] interesting pod/downloads-7954f5f757-w56q9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body=
Dec 04 15:06:01 crc kubenswrapper[4946]: I1204 15:06:01.923503 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-w56q9" podUID="53433a4f-ccda-4c5c-9dca-7389ec6d741c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused"
Dec 04 15:06:01 crc kubenswrapper[4946]: I1204 15:06:01.988429 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=4.988372545 podStartE2EDuration="4.988372545s" podCreationTimestamp="2025-12-04 15:05:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:06:01.945214894 +0000 UTC m=+212.831258545" watchObservedRunningTime="2025-12-04 15:06:01.988372545 +0000 UTC m=+212.874416186"
Dec 04 15:06:02 crc kubenswrapper[4946]: I1204 15:06:02.933636 4946 patch_prober.go:28] interesting pod/downloads-7954f5f757-w56q9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body=
Dec 04 15:06:02 crc kubenswrapper[4946]: I1204 15:06:02.934645 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-w56q9" podUID="53433a4f-ccda-4c5c-9dca-7389ec6d741c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused"
Dec 04 15:06:03 crc kubenswrapper[4946]: I1204 15:06:03.000419 4946 patch_prober.go:28] interesting pod/downloads-7954f5f757-w56q9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body=
Dec 04 15:06:03 crc kubenswrapper[4946]: I1204 15:06:03.000859 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-w56q9" podUID="53433a4f-ccda-4c5c-9dca-7389ec6d741c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused"
Dec 04 15:06:03 crc kubenswrapper[4946]: I1204 15:06:03.000487 4946 patch_prober.go:28] interesting pod/downloads-7954f5f757-w56q9 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused" start-of-body=
Dec 04 15:06:03 crc kubenswrapper[4946]: I1204 15:06:03.001015 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-w56q9" podUID="53433a4f-ccda-4c5c-9dca-7389ec6d741c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.20:8080/\": dial tcp 10.217.0.20:8080: connect: connection refused"
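Editor's note: with the entries restored to one per line, the repeated "Probe failed" records become easy to aggregate when triaging a capture like this. A small stdlib Go helper that counts prober.go:107 failures per pod and probe type from a kubelet log on stdin; the regexp matches the format seen above and may need adjusting for other kubelet versions.

```go
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// Matches the prober.go:107 record format, e.g.
// "Probe failed" probeType="Readiness" pod="openshift-console/downloads-..."
var probeRe = regexp.MustCompile(`"Probe failed" probeType="([^"]+)" pod="([^"]+)"`)

func main() {
	counts := map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // allow long log lines
	for sc.Scan() {
		if m := probeRe.FindStringSubmatch(sc.Text()); m != nil {
			counts[m[2]+" ("+m[1]+")"]++
		}
	}
	for k, n := range counts {
		fmt.Printf("%6d  %s\n", n, k)
	}
}
```

Usage (hypothetical file name): `go run probecount.go < kubelet.log`.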
Dec 04 15:06:03 crc kubenswrapper[4946]: I1204 15:06:03.182692 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 04 15:06:03 crc kubenswrapper[4946]: I1204 15:06:03.271298 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ef91c060-23b3-4aa6-9060-901db5169a46-kube-api-access\") pod \"ef91c060-23b3-4aa6-9060-901db5169a46\" (UID: \"ef91c060-23b3-4aa6-9060-901db5169a46\") "
Dec 04 15:06:03 crc kubenswrapper[4946]: I1204 15:06:03.271363 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ef91c060-23b3-4aa6-9060-901db5169a46-kubelet-dir\") pod \"ef91c060-23b3-4aa6-9060-901db5169a46\" (UID: \"ef91c060-23b3-4aa6-9060-901db5169a46\") "
Dec 04 15:06:03 crc kubenswrapper[4946]: I1204 15:06:03.271541 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef91c060-23b3-4aa6-9060-901db5169a46-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "ef91c060-23b3-4aa6-9060-901db5169a46" (UID: "ef91c060-23b3-4aa6-9060-901db5169a46"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 15:06:03 crc kubenswrapper[4946]: I1204 15:06:03.283443 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef91c060-23b3-4aa6-9060-901db5169a46-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "ef91c060-23b3-4aa6-9060-901db5169a46" (UID: "ef91c060-23b3-4aa6-9060-901db5169a46"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:06:03 crc kubenswrapper[4946]: I1204 15:06:03.372244 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ef91c060-23b3-4aa6-9060-901db5169a46-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 04 15:06:03 crc kubenswrapper[4946]: I1204 15:06:03.372275 4946 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ef91c060-23b3-4aa6-9060-901db5169a46-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 04 15:06:03 crc kubenswrapper[4946]: I1204 15:06:03.935477 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"ef91c060-23b3-4aa6-9060-901db5169a46","Type":"ContainerDied","Data":"8f21a66fe2398d6a1c200b53afdd78e6be177f41f9e9476f92ce5f9606d1ac57"}
Dec 04 15:06:03 crc kubenswrapper[4946]: I1204 15:06:03.935517 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f21a66fe2398d6a1c200b53afdd78e6be177f41f9e9476f92ce5f9606d1ac57"
Dec 04 15:06:03 crc kubenswrapper[4946]: I1204 15:06:03.935572 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 04 15:06:04 crc kubenswrapper[4946]: I1204 15:06:04.994975 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cp7w9"]
Dec 04 15:06:13 crc kubenswrapper[4946]: I1204 15:06:13.007814 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-w56q9"
Dec 04 15:06:14 crc kubenswrapper[4946]: I1204 15:06:14.049100 4946 generic.go:334] "Generic (PLEG): container finished" podID="ca7d43a3-9406-4810-9105-ede64b23375e" containerID="4d78f4fa2cec8f9a07b7b98125c9e44d9496da7c3a0cfd753c613a300d8670cf" exitCode=0
Dec 04 15:06:14 crc kubenswrapper[4946]: I1204 15:06:14.049171 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqqvb" event={"ID":"ca7d43a3-9406-4810-9105-ede64b23375e","Type":"ContainerDied","Data":"4d78f4fa2cec8f9a07b7b98125c9e44d9496da7c3a0cfd753c613a300d8670cf"}
Dec 04 15:06:14 crc kubenswrapper[4946]: I1204 15:06:14.053142 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kr77j" event={"ID":"74c8940c-1ed3-4aaa-94aa-0623f25f008e","Type":"ContainerStarted","Data":"91958b72d326f4106921c520f0b316281727518bc5d4704ba6e138d61acd2335"}
Dec 04 15:06:15 crc kubenswrapper[4946]: I1204 15:06:15.063326 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m666j" event={"ID":"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5","Type":"ContainerStarted","Data":"4d0b141bd14f5252f1a968e4316ba4d0ac711635aef7b5e8b9a083eb2106c953"}
Dec 04 15:06:15 crc kubenswrapper[4946]: I1204 15:06:15.066131 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-df2mv" event={"ID":"fec9d9bd-a20b-4625-9070-19949999c206","Type":"ContainerStarted","Data":"412175715a6662718e4875aba9eba1b3c486684064a2e821e193eced06a5d3e7"}
Dec 04 15:06:16 crc kubenswrapper[4946]: I1204 15:06:16.081593 4946 generic.go:334] "Generic (PLEG): container finished" podID="fec9d9bd-a20b-4625-9070-19949999c206" containerID="412175715a6662718e4875aba9eba1b3c486684064a2e821e193eced06a5d3e7" exitCode=0
Dec 04 15:06:16 crc kubenswrapper[4946]: I1204 15:06:16.081666 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-df2mv" event={"ID":"fec9d9bd-a20b-4625-9070-19949999c206","Type":"ContainerDied","Data":"412175715a6662718e4875aba9eba1b3c486684064a2e821e193eced06a5d3e7"}
Dec 04 15:06:16 crc kubenswrapper[4946]: I1204 15:06:16.085292 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mzks" event={"ID":"73b58e95-46d5-468b-9890-a4fc3c5a0bde","Type":"ContainerStarted","Data":"2b442f0743d9e4659f97cb863caee7b13cb7e257a4d49f89f7d6063feeb5fbc0"}
Dec 04 15:06:16 crc kubenswrapper[4946]: I1204 15:06:16.086785 4946 generic.go:334] "Generic (PLEG): container finished" podID="74c8940c-1ed3-4aaa-94aa-0623f25f008e" containerID="91958b72d326f4106921c520f0b316281727518bc5d4704ba6e138d61acd2335" exitCode=0
Dec 04 15:06:16 crc kubenswrapper[4946]: I1204 15:06:16.086824 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kr77j" event={"ID":"74c8940c-1ed3-4aaa-94aa-0623f25f008e","Type":"ContainerDied","Data":"91958b72d326f4106921c520f0b316281727518bc5d4704ba6e138d61acd2335"}
Dec 04 15:06:18 crc kubenswrapper[4946]: I1204 15:06:18.099753 4946 generic.go:334] "Generic (PLEG): container finished" podID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" containerID="2b442f0743d9e4659f97cb863caee7b13cb7e257a4d49f89f7d6063feeb5fbc0" exitCode=0
Dec 04 15:06:18 crc kubenswrapper[4946]: I1204 15:06:18.099881 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mzks" event={"ID":"73b58e95-46d5-468b-9890-a4fc3c5a0bde","Type":"ContainerDied","Data":"2b442f0743d9e4659f97cb863caee7b13cb7e257a4d49f89f7d6063feeb5fbc0"}
Dec 04 15:06:19 crc kubenswrapper[4946]: I1204 15:06:19.108224 4946 generic.go:334] "Generic (PLEG): container finished" podID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" containerID="4d0b141bd14f5252f1a968e4316ba4d0ac711635aef7b5e8b9a083eb2106c953" exitCode=0
Dec 04 15:06:19 crc kubenswrapper[4946]: I1204 15:06:19.108371 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m666j" event={"ID":"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5","Type":"ContainerDied","Data":"4d0b141bd14f5252f1a968e4316ba4d0ac711635aef7b5e8b9a083eb2106c953"}
Dec 04 15:06:19 crc kubenswrapper[4946]: I1204 15:06:19.111697 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqqvb" event={"ID":"ca7d43a3-9406-4810-9105-ede64b23375e","Type":"ContainerStarted","Data":"8be6f1d7be01c79141662fe60594441d7adf3e766f1330aa686c139359f26be6"}
Dec 04 15:06:19 crc kubenswrapper[4946]: I1204 15:06:19.150647 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fqqvb" podStartSLOduration=4.92023089 podStartE2EDuration="1m11.150617719s" podCreationTimestamp="2025-12-04 15:05:08 +0000 UTC" firstStartedPulling="2025-12-04 15:05:11.515566858 +0000 UTC m=+162.401610499" lastFinishedPulling="2025-12-04 15:06:17.745953687 +0000 UTC m=+228.631997328" observedRunningTime="2025-12-04 15:06:19.148059081 +0000 UTC m=+230.034102722" watchObservedRunningTime="2025-12-04 15:06:19.150617719 +0000 UTC m=+230.036661380"
Dec 04 15:06:28 crc kubenswrapper[4946]: I1204 15:06:28.900097 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fqqvb"
Dec 04 15:06:28 crc kubenswrapper[4946]: I1204 15:06:28.901024 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fqqvb"
Dec 04 15:06:29 crc kubenswrapper[4946]: I1204 15:06:29.381558 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fqqvb"
Dec 04 15:06:29 crc kubenswrapper[4946]: I1204 15:06:29.434499 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fqqvb"
Dec 04 15:06:29 crc kubenswrapper[4946]: I1204 15:06:29.624025 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fqqvb"]
Dec 04 15:06:30 crc kubenswrapper[4946]: I1204 15:06:30.034221 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" podUID="02de3a18-59d7-48c0-bf9c-d40c09ed8cee" containerName="oauth-openshift" containerID="cri-o://6ebf73ab902b371237ebee0052503c24a8656af082c62b7be81eb5296c96fde6" gracePeriod=15
podUID="ca7d43a3-9406-4810-9105-ede64b23375e" containerName="registry-server" containerID="cri-o://8be6f1d7be01c79141662fe60594441d7adf3e766f1330aa686c139359f26be6" gracePeriod=2 Dec 04 15:06:32 crc kubenswrapper[4946]: I1204 15:06:32.200724 4946 generic.go:334] "Generic (PLEG): container finished" podID="02de3a18-59d7-48c0-bf9c-d40c09ed8cee" containerID="6ebf73ab902b371237ebee0052503c24a8656af082c62b7be81eb5296c96fde6" exitCode=0 Dec 04 15:06:32 crc kubenswrapper[4946]: I1204 15:06:32.201005 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" event={"ID":"02de3a18-59d7-48c0-bf9c-d40c09ed8cee","Type":"ContainerDied","Data":"6ebf73ab902b371237ebee0052503c24a8656af082c62b7be81eb5296c96fde6"} Dec 04 15:06:32 crc kubenswrapper[4946]: I1204 15:06:32.206163 4946 generic.go:334] "Generic (PLEG): container finished" podID="ca7d43a3-9406-4810-9105-ede64b23375e" containerID="8be6f1d7be01c79141662fe60594441d7adf3e766f1330aa686c139359f26be6" exitCode=0 Dec 04 15:06:32 crc kubenswrapper[4946]: I1204 15:06:32.206231 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqqvb" event={"ID":"ca7d43a3-9406-4810-9105-ede64b23375e","Type":"ContainerDied","Data":"8be6f1d7be01c79141662fe60594441d7adf3e766f1330aa686c139359f26be6"} Dec 04 15:06:33 crc kubenswrapper[4946]: I1204 15:06:33.397103 4946 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-cp7w9 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.8:6443/healthz\": dial tcp 10.217.0.8:6443: connect: connection refused" start-of-body= Dec 04 15:06:33 crc kubenswrapper[4946]: I1204 15:06:33.397272 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" podUID="02de3a18-59d7-48c0-bf9c-d40c09ed8cee" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.8:6443/healthz\": dial tcp 10.217.0.8:6443: connect: connection refused" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.642748 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.679638 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-68b6dd9b65-p7shk"] Dec 04 15:06:34 crc kubenswrapper[4946]: E1204 15:06:34.679994 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02de3a18-59d7-48c0-bf9c-d40c09ed8cee" containerName="oauth-openshift" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.680020 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="02de3a18-59d7-48c0-bf9c-d40c09ed8cee" containerName="oauth-openshift" Dec 04 15:06:34 crc kubenswrapper[4946]: E1204 15:06:34.680053 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef91c060-23b3-4aa6-9060-901db5169a46" containerName="pruner" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.680063 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef91c060-23b3-4aa6-9060-901db5169a46" containerName="pruner" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.680238 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="02de3a18-59d7-48c0-bf9c-d40c09ed8cee" containerName="oauth-openshift" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.680268 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef91c060-23b3-4aa6-9060-901db5169a46" containerName="pruner" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.680862 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.692105 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-68b6dd9b65-p7shk"] Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.713331 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fqqvb" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.787764 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-ocp-branding-template\") pod \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.787830 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-trusted-ca-bundle\") pod \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.787850 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-idp-0-file-data\") pod \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.787895 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-audit-policies\") pod \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.787911 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-session\") pod \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.787949 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-router-certs\") pod \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.787998 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-template-error\") pod \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.788036 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-template-provider-selection\") pod \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.788056 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t529n\" (UniqueName: \"kubernetes.io/projected/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-kube-api-access-t529n\") pod \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " Dec 04 
15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.788083 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-serving-cert\") pod \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.788100 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-cliconfig\") pod \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.788131 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-service-ca\") pod \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.788154 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-template-login\") pod \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.788259 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-audit-dir\") pod \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\" (UID: \"02de3a18-59d7-48c0-bf9c-d40c09ed8cee\") " Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.788503 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-service-ca\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.788544 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-router-certs\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.788590 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.788621 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-user-template-login\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.788661 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-session\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.788680 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.788700 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl4qw\" (UniqueName: \"kubernetes.io/projected/17928ac6-ea25-4070-93c5-f36f530e86f9-kube-api-access-gl4qw\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.789943 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "02de3a18-59d7-48c0-bf9c-d40c09ed8cee" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.790522 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "02de3a18-59d7-48c0-bf9c-d40c09ed8cee" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.790809 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/17928ac6-ea25-4070-93c5-f36f530e86f9-audit-dir\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.790995 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.791091 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.791214 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/17928ac6-ea25-4070-93c5-f36f530e86f9-audit-policies\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.791233 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.791254 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.791274 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-user-template-error\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.791314 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-service-ca\") on node \"crc\" DevicePath 
\"\"" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.791325 4946 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.796317 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "02de3a18-59d7-48c0-bf9c-d40c09ed8cee" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.797073 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "02de3a18-59d7-48c0-bf9c-d40c09ed8cee" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.797086 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "02de3a18-59d7-48c0-bf9c-d40c09ed8cee" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.797596 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "02de3a18-59d7-48c0-bf9c-d40c09ed8cee" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.798139 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "02de3a18-59d7-48c0-bf9c-d40c09ed8cee" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.798503 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "02de3a18-59d7-48c0-bf9c-d40c09ed8cee" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.798812 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-kube-api-access-t529n" (OuterVolumeSpecName: "kube-api-access-t529n") pod "02de3a18-59d7-48c0-bf9c-d40c09ed8cee" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee"). 
InnerVolumeSpecName "kube-api-access-t529n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.799062 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "02de3a18-59d7-48c0-bf9c-d40c09ed8cee" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.803109 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "02de3a18-59d7-48c0-bf9c-d40c09ed8cee" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.804469 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "02de3a18-59d7-48c0-bf9c-d40c09ed8cee" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.805050 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "02de3a18-59d7-48c0-bf9c-d40c09ed8cee" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.809546 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "02de3a18-59d7-48c0-bf9c-d40c09ed8cee" (UID: "02de3a18-59d7-48c0-bf9c-d40c09ed8cee"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.892590 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfh42\" (UniqueName: \"kubernetes.io/projected/ca7d43a3-9406-4810-9105-ede64b23375e-kube-api-access-kfh42\") pod \"ca7d43a3-9406-4810-9105-ede64b23375e\" (UID: \"ca7d43a3-9406-4810-9105-ede64b23375e\") " Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.892680 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca7d43a3-9406-4810-9105-ede64b23375e-utilities\") pod \"ca7d43a3-9406-4810-9105-ede64b23375e\" (UID: \"ca7d43a3-9406-4810-9105-ede64b23375e\") " Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.892744 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca7d43a3-9406-4810-9105-ede64b23375e-catalog-content\") pod \"ca7d43a3-9406-4810-9105-ede64b23375e\" (UID: \"ca7d43a3-9406-4810-9105-ede64b23375e\") " Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.892936 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.892964 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-user-template-login\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.892993 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893011 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-session\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893026 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl4qw\" (UniqueName: \"kubernetes.io/projected/17928ac6-ea25-4070-93c5-f36f530e86f9-kube-api-access-gl4qw\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893045 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/17928ac6-ea25-4070-93c5-f36f530e86f9-audit-dir\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893079 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893102 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893149 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/17928ac6-ea25-4070-93c5-f36f530e86f9-audit-policies\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893164 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893181 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893201 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-user-template-error\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893232 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-service-ca\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893260 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-router-certs\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893308 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893321 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893332 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893342 4946 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893352 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893362 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893372 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893382 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893394 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t529n\" (UniqueName: \"kubernetes.io/projected/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-kube-api-access-t529n\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893403 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.893412 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:34 
crc kubenswrapper[4946]: I1204 15:06:34.893424 4946 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/02de3a18-59d7-48c0-bf9c-d40c09ed8cee-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.894394 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca7d43a3-9406-4810-9105-ede64b23375e-utilities" (OuterVolumeSpecName: "utilities") pod "ca7d43a3-9406-4810-9105-ede64b23375e" (UID: "ca7d43a3-9406-4810-9105-ede64b23375e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.894834 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.895412 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/17928ac6-ea25-4070-93c5-f36f530e86f9-audit-policies\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.896171 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-service-ca\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.896376 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.896437 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/17928ac6-ea25-4070-93c5-f36f530e86f9-audit-dir\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.899908 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.900665 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.912779 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca7d43a3-9406-4810-9105-ede64b23375e-kube-api-access-kfh42" (OuterVolumeSpecName: "kube-api-access-kfh42") pod "ca7d43a3-9406-4810-9105-ede64b23375e" (UID: "ca7d43a3-9406-4810-9105-ede64b23375e"). InnerVolumeSpecName "kube-api-access-kfh42". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.912983 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.913301 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-session\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.913445 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-router-certs\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.913883 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-user-template-error\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.915590 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-user-template-login\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.915631 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/17928ac6-ea25-4070-93c5-f36f530e86f9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.919070 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl4qw\" (UniqueName: 
\"kubernetes.io/projected/17928ac6-ea25-4070-93c5-f36f530e86f9-kube-api-access-gl4qw\") pod \"oauth-openshift-68b6dd9b65-p7shk\" (UID: \"17928ac6-ea25-4070-93c5-f36f530e86f9\") " pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.984529 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca7d43a3-9406-4810-9105-ede64b23375e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ca7d43a3-9406-4810-9105-ede64b23375e" (UID: "ca7d43a3-9406-4810-9105-ede64b23375e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.994416 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfh42\" (UniqueName: \"kubernetes.io/projected/ca7d43a3-9406-4810-9105-ede64b23375e-kube-api-access-kfh42\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.994468 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca7d43a3-9406-4810-9105-ede64b23375e-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:34 crc kubenswrapper[4946]: I1204 15:06:34.994479 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca7d43a3-9406-4810-9105-ede64b23375e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.005252 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.226708 4946 generic.go:334] "Generic (PLEG): container finished" podID="8c9f7504-90bf-4e33-be97-43f5d81896ae" containerID="1268cd3144473fc88da36fc8555082e9b8c28d645ba1bf35b453fd93dc5825e2" exitCode=0 Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.226797 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8mgfv" event={"ID":"8c9f7504-90bf-4e33-be97-43f5d81896ae","Type":"ContainerDied","Data":"1268cd3144473fc88da36fc8555082e9b8c28d645ba1bf35b453fd93dc5825e2"} Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.236511 4946 generic.go:334] "Generic (PLEG): container finished" podID="9c9bd510-5d62-4814-bd88-62c5a3051f9d" containerID="ec80fda59a454836a1fafb8e0d51b85e9f921e4cff448bab6ac0010797bcf9c8" exitCode=0 Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.236686 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p65s4" event={"ID":"9c9bd510-5d62-4814-bd88-62c5a3051f9d","Type":"ContainerDied","Data":"ec80fda59a454836a1fafb8e0d51b85e9f921e4cff448bab6ac0010797bcf9c8"} Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.240491 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kr77j" event={"ID":"74c8940c-1ed3-4aaa-94aa-0623f25f008e","Type":"ContainerStarted","Data":"8a1eaff474378f6984f2824129d2075fcd592707a9632c8751552b805e03a97a"} Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.246959 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mzks" event={"ID":"73b58e95-46d5-468b-9890-a4fc3c5a0bde","Type":"ContainerStarted","Data":"e992c766db9d9d20751838d21dac482aa691c4f5bd717b1a7a4ebe0bf98b7d39"} Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 
15:06:35.252557 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqqvb" event={"ID":"ca7d43a3-9406-4810-9105-ede64b23375e","Type":"ContainerDied","Data":"1173293872eb5ef463209b4d3c86e38c44b225cd33be064e7f2a89d9555946d4"} Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.252606 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fqqvb" Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.252619 4946 scope.go:117] "RemoveContainer" containerID="8be6f1d7be01c79141662fe60594441d7adf3e766f1330aa686c139359f26be6" Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.257068 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-df2mv" event={"ID":"fec9d9bd-a20b-4625-9070-19949999c206","Type":"ContainerStarted","Data":"8ff5d0de19a536206b38267bfa2e1cf79b4b48c7f8f8c48050e119d0b8e3d59a"} Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.260351 4946 generic.go:334] "Generic (PLEG): container finished" podID="7a98b449-5e32-4f53-8829-fc2d01b603b8" containerID="88721f75faa2fb1fd1966132e0bf9a0011a27f8a16a2224ec92d366d9ae7231b" exitCode=0 Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.260436 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrlxj" event={"ID":"7a98b449-5e32-4f53-8829-fc2d01b603b8","Type":"ContainerDied","Data":"88721f75faa2fb1fd1966132e0bf9a0011a27f8a16a2224ec92d366d9ae7231b"} Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.262663 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.262660 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cp7w9" event={"ID":"02de3a18-59d7-48c0-bf9c-d40c09ed8cee","Type":"ContainerDied","Data":"ad10e3b4559f19e1bfcb249147e585d52fc2c77595bc1b8161d22b7f6334609e"} Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.282340 4946 scope.go:117] "RemoveContainer" containerID="4d78f4fa2cec8f9a07b7b98125c9e44d9496da7c3a0cfd753c613a300d8670cf" Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.283897 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m666j" event={"ID":"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5","Type":"ContainerStarted","Data":"6dd2e8d46022b0e9908a3b1243d4d720615730d5c0386bf24fb4257abb30ace3"} Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.300071 4946 scope.go:117] "RemoveContainer" containerID="93f940ca1cc04727811d4e6c3a33875f2621906ca18d6494efd6052d5d565f54" Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.315491 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7mzks" podStartSLOduration=4.129223374 podStartE2EDuration="1m27.315467093s" podCreationTimestamp="2025-12-04 15:05:08 +0000 UTC" firstStartedPulling="2025-12-04 15:05:11.192521939 +0000 UTC m=+162.078565580" lastFinishedPulling="2025-12-04 15:06:34.378765658 +0000 UTC m=+245.264809299" observedRunningTime="2025-12-04 15:06:35.296537258 +0000 UTC m=+246.182580899" watchObservedRunningTime="2025-12-04 15:06:35.315467093 +0000 UTC m=+246.201510944" Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.316213 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-kr77j" podStartSLOduration=4.950315461 podStartE2EDuration="1m24.316204095s" podCreationTimestamp="2025-12-04 15:05:11 +0000 UTC" firstStartedPulling="2025-12-04 15:05:14.992371131 +0000 UTC m=+165.878414772" lastFinishedPulling="2025-12-04 15:06:34.358259725 +0000 UTC m=+245.244303406" observedRunningTime="2025-12-04 15:06:35.314091561 +0000 UTC m=+246.200135202" watchObservedRunningTime="2025-12-04 15:06:35.316204095 +0000 UTC m=+246.202247736" Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.319551 4946 scope.go:117] "RemoveContainer" containerID="6ebf73ab902b371237ebee0052503c24a8656af082c62b7be81eb5296c96fde6" Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.340112 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-df2mv" podStartSLOduration=4.226048723 podStartE2EDuration="1m27.340087281s" podCreationTimestamp="2025-12-04 15:05:08 +0000 UTC" firstStartedPulling="2025-12-04 15:05:11.293100032 +0000 UTC m=+162.179143673" lastFinishedPulling="2025-12-04 15:06:34.40713859 +0000 UTC m=+245.293182231" observedRunningTime="2025-12-04 15:06:35.336282965 +0000 UTC m=+246.222326616" watchObservedRunningTime="2025-12-04 15:06:35.340087281 +0000 UTC m=+246.226130922" Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.352899 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m666j" podStartSLOduration=4.931920344 podStartE2EDuration="1m24.352879559s" podCreationTimestamp="2025-12-04 15:05:11 +0000 UTC" firstStartedPulling="2025-12-04 15:05:14.986540486 +0000 UTC m=+165.872584127" lastFinishedPulling="2025-12-04 15:06:34.407499701 +0000 UTC m=+245.293543342" observedRunningTime="2025-12-04 15:06:35.349221738 +0000 UTC m=+246.235265389" watchObservedRunningTime="2025-12-04 15:06:35.352879559 +0000 UTC m=+246.238923200" Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.366808 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cp7w9"] Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.370538 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cp7w9"] Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.396306 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fqqvb"] Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.401932 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fqqvb"] Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.428105 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-68b6dd9b65-p7shk"] Dec 04 15:06:35 crc kubenswrapper[4946]: W1204 15:06:35.436037 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod17928ac6_ea25_4070_93c5_f36f530e86f9.slice/crio-abca556a45a218738fbbbced46309c3a40adb29394c717a3b346f4edf1409432 WatchSource:0}: Error finding container abca556a45a218738fbbbced46309c3a40adb29394c717a3b346f4edf1409432: Status 404 returned error can't find the container with id abca556a45a218738fbbbced46309c3a40adb29394c717a3b346f4edf1409432 Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.460508 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02de3a18-59d7-48c0-bf9c-d40c09ed8cee" 
path="/var/lib/kubelet/pods/02de3a18-59d7-48c0-bf9c-d40c09ed8cee/volumes" Dec 04 15:06:35 crc kubenswrapper[4946]: I1204 15:06:35.461383 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca7d43a3-9406-4810-9105-ede64b23375e" path="/var/lib/kubelet/pods/ca7d43a3-9406-4810-9105-ede64b23375e/volumes" Dec 04 15:06:36 crc kubenswrapper[4946]: I1204 15:06:36.295457 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8mgfv" event={"ID":"8c9f7504-90bf-4e33-be97-43f5d81896ae","Type":"ContainerStarted","Data":"f13e1ed4d91b07976e087c3006e87b65ea0cb094f53b49799c938467c66bca8f"} Dec 04 15:06:36 crc kubenswrapper[4946]: I1204 15:06:36.297289 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" event={"ID":"17928ac6-ea25-4070-93c5-f36f530e86f9","Type":"ContainerStarted","Data":"7b6c28af243c5c5a3e94e2cb6c59fc620fdcb201352749196ad813d0c33f124c"} Dec 04 15:06:36 crc kubenswrapper[4946]: I1204 15:06:36.297337 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" event={"ID":"17928ac6-ea25-4070-93c5-f36f530e86f9","Type":"ContainerStarted","Data":"abca556a45a218738fbbbced46309c3a40adb29394c717a3b346f4edf1409432"} Dec 04 15:06:36 crc kubenswrapper[4946]: I1204 15:06:36.297635 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:36 crc kubenswrapper[4946]: I1204 15:06:36.300879 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p65s4" event={"ID":"9c9bd510-5d62-4814-bd88-62c5a3051f9d","Type":"ContainerStarted","Data":"417a02e48c8d926d32b45be48cbe68d8ee5db50498770d81bfe1c02efa84c7e7"} Dec 04 15:06:36 crc kubenswrapper[4946]: I1204 15:06:36.303437 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrlxj" event={"ID":"7a98b449-5e32-4f53-8829-fc2d01b603b8","Type":"ContainerStarted","Data":"04c5e1e86ce28602ac8333d7f6be01a97007faa7102cee140fae2de6453a9b26"} Dec 04 15:06:36 crc kubenswrapper[4946]: I1204 15:06:36.303936 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" Dec 04 15:06:36 crc kubenswrapper[4946]: I1204 15:06:36.322504 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8mgfv" podStartSLOduration=4.362642699 podStartE2EDuration="1m28.322485985s" podCreationTimestamp="2025-12-04 15:05:08 +0000 UTC" firstStartedPulling="2025-12-04 15:05:11.691024906 +0000 UTC m=+162.577068547" lastFinishedPulling="2025-12-04 15:06:35.650868192 +0000 UTC m=+246.536911833" observedRunningTime="2025-12-04 15:06:36.318469683 +0000 UTC m=+247.204513324" watchObservedRunningTime="2025-12-04 15:06:36.322485985 +0000 UTC m=+247.208529626" Dec 04 15:06:36 crc kubenswrapper[4946]: I1204 15:06:36.382078 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vrlxj" podStartSLOduration=3.35667299 podStartE2EDuration="1m26.382057655s" podCreationTimestamp="2025-12-04 15:05:10 +0000 UTC" firstStartedPulling="2025-12-04 15:05:12.999348385 +0000 UTC m=+163.885392026" lastFinishedPulling="2025-12-04 15:06:36.02473305 +0000 UTC m=+246.910776691" observedRunningTime="2025-12-04 15:06:36.378558718 +0000 UTC m=+247.264602359" 
watchObservedRunningTime="2025-12-04 15:06:36.382057655 +0000 UTC m=+247.268101306" Dec 04 15:06:36 crc kubenswrapper[4946]: I1204 15:06:36.382249 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-68b6dd9b65-p7shk" podStartSLOduration=31.38223813 podStartE2EDuration="31.38223813s" podCreationTimestamp="2025-12-04 15:06:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:06:36.351976691 +0000 UTC m=+247.238020332" watchObservedRunningTime="2025-12-04 15:06:36.38223813 +0000 UTC m=+247.268281791" Dec 04 15:06:36 crc kubenswrapper[4946]: I1204 15:06:36.404873 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-p65s4" podStartSLOduration=3.734896118 podStartE2EDuration="1m26.404853127s" podCreationTimestamp="2025-12-04 15:05:10 +0000 UTC" firstStartedPulling="2025-12-04 15:05:12.950426827 +0000 UTC m=+163.836470468" lastFinishedPulling="2025-12-04 15:06:35.620383836 +0000 UTC m=+246.506427477" observedRunningTime="2025-12-04 15:06:36.401738513 +0000 UTC m=+247.287782154" watchObservedRunningTime="2025-12-04 15:06:36.404853127 +0000 UTC m=+247.290896768" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.453787 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-df2mv" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.454056 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-df2mv" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.501218 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-df2mv" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.788903 4946 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 04 15:06:38 crc kubenswrapper[4946]: E1204 15:06:38.789727 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca7d43a3-9406-4810-9105-ede64b23375e" containerName="registry-server" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.789768 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca7d43a3-9406-4810-9105-ede64b23375e" containerName="registry-server" Dec 04 15:06:38 crc kubenswrapper[4946]: E1204 15:06:38.789782 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca7d43a3-9406-4810-9105-ede64b23375e" containerName="extract-utilities" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.789791 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca7d43a3-9406-4810-9105-ede64b23375e" containerName="extract-utilities" Dec 04 15:06:38 crc kubenswrapper[4946]: E1204 15:06:38.789832 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca7d43a3-9406-4810-9105-ede64b23375e" containerName="extract-content" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.789846 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca7d43a3-9406-4810-9105-ede64b23375e" containerName="extract-content" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.790202 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca7d43a3-9406-4810-9105-ede64b23375e" containerName="registry-server" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.791092 4946 kubelet.go:2431] "SyncLoop 
REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.791192 4946 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 04 15:06:38 crc kubenswrapper[4946]: E1204 15:06:38.791612 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.791637 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 04 15:06:38 crc kubenswrapper[4946]: E1204 15:06:38.791653 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.791660 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 04 15:06:38 crc kubenswrapper[4946]: E1204 15:06:38.791672 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.791681 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 04 15:06:38 crc kubenswrapper[4946]: E1204 15:06:38.791699 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.791707 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 04 15:06:38 crc kubenswrapper[4946]: E1204 15:06:38.791719 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.791726 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 04 15:06:38 crc kubenswrapper[4946]: E1204 15:06:38.791747 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.791753 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 04 15:06:38 crc kubenswrapper[4946]: E1204 15:06:38.791770 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.791776 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.791589 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.792018 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8" gracePeriod=15 Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.792750 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.792807 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.792825 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.792844 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.792863 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.792895 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.794452 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a" gracePeriod=15 Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.794735 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563" gracePeriod=15 Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.794808 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4" gracePeriod=15 Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.794878 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d" gracePeriod=15 Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.801463 4946 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 04 
15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.819708 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7mzks" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.820951 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7mzks" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.865597 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.881495 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7mzks" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.956407 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.956452 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.956496 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.956526 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.956578 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.956605 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.956622 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.956661 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.982754 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8mgfv" Dec 04 15:06:38 crc kubenswrapper[4946]: I1204 15:06:38.983227 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8mgfv" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.031717 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8mgfv" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.057965 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.058023 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.058046 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.058097 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.058140 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.058155 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.058165 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.058190 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.058752 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.058784 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.058816 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.058892 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.058926 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.058966 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.059017 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.059014 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" 
(UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.161884 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:06:39 crc kubenswrapper[4946]: W1204 15:06:39.182724 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-40f0e7e75ac5f13a9ec8e293bf05a0de84bbd69f0270642973f6ba6360e967bd WatchSource:0}: Error finding container 40f0e7e75ac5f13a9ec8e293bf05a0de84bbd69f0270642973f6ba6360e967bd: Status 404 returned error can't find the container with id 40f0e7e75ac5f13a9ec8e293bf05a0de84bbd69f0270642973f6ba6360e967bd Dec 04 15:06:39 crc kubenswrapper[4946]: E1204 15:06:39.186757 4946 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.220:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e0b8bd56e9de9 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-04 15:06:39.184969193 +0000 UTC m=+250.071012834,LastTimestamp:2025-12-04 15:06:39.184969193 +0000 UTC m=+250.071012834,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.326164 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"40f0e7e75ac5f13a9ec8e293bf05a0de84bbd69f0270642973f6ba6360e967bd"} Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.333539 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.335449 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.336025 4946 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a" exitCode=0 Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.336056 4946 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4" exitCode=0 Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.336066 4946 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" 
containerID="22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563" exitCode=0 Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.336078 4946 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d" exitCode=2 Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.336220 4946 scope.go:117] "RemoveContainer" containerID="f7fc871a25c542371d46433fd630e1f7fe1bc9b74949a12c7c4cfa5c0e3391df" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.338985 4946 generic.go:334] "Generic (PLEG): container finished" podID="2ba0b499-56a8-4e62-93cc-c2f2f1cad117" containerID="d205aefda6c8f7fa376bca235ccc7f6207a8f8cc183f29b35153baf2e39537b5" exitCode=0 Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.339830 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2ba0b499-56a8-4e62-93cc-c2f2f1cad117","Type":"ContainerDied","Data":"d205aefda6c8f7fa376bca235ccc7f6207a8f8cc183f29b35153baf2e39537b5"} Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.398013 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7mzks" Dec 04 15:06:39 crc kubenswrapper[4946]: I1204 15:06:39.399102 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-df2mv" Dec 04 15:06:40 crc kubenswrapper[4946]: E1204 15:06:40.315494 4946 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:40 crc kubenswrapper[4946]: E1204 15:06:40.316159 4946 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:40 crc kubenswrapper[4946]: E1204 15:06:40.316438 4946 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:40 crc kubenswrapper[4946]: E1204 15:06:40.316718 4946 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:40 crc kubenswrapper[4946]: E1204 15:06:40.316939 4946 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.316968 4946 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 04 15:06:40 crc kubenswrapper[4946]: E1204 15:06:40.317210 4946 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="200ms" Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.345755 4946 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"b6e0b8df59a29e319a03bfb1f0daf5a44cd07c8a86aa66d04139f437ae05963f"} Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.348544 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.401656 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vrlxj" Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.401727 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vrlxj" Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.448181 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vrlxj" Dec 04 15:06:40 crc kubenswrapper[4946]: E1204 15:06:40.518539 4946 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="400ms" Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.618799 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.783254 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2ba0b499-56a8-4e62-93cc-c2f2f1cad117-kube-api-access\") pod \"2ba0b499-56a8-4e62-93cc-c2f2f1cad117\" (UID: \"2ba0b499-56a8-4e62-93cc-c2f2f1cad117\") " Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.783458 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2ba0b499-56a8-4e62-93cc-c2f2f1cad117-kubelet-dir\") pod \"2ba0b499-56a8-4e62-93cc-c2f2f1cad117\" (UID: \"2ba0b499-56a8-4e62-93cc-c2f2f1cad117\") " Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.783565 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2ba0b499-56a8-4e62-93cc-c2f2f1cad117-var-lock\") pod \"2ba0b499-56a8-4e62-93cc-c2f2f1cad117\" (UID: \"2ba0b499-56a8-4e62-93cc-c2f2f1cad117\") " Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.783621 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2ba0b499-56a8-4e62-93cc-c2f2f1cad117-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2ba0b499-56a8-4e62-93cc-c2f2f1cad117" (UID: "2ba0b499-56a8-4e62-93cc-c2f2f1cad117"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.783772 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2ba0b499-56a8-4e62-93cc-c2f2f1cad117-var-lock" (OuterVolumeSpecName: "var-lock") pod "2ba0b499-56a8-4e62-93cc-c2f2f1cad117" (UID: "2ba0b499-56a8-4e62-93cc-c2f2f1cad117"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.784056 4946 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2ba0b499-56a8-4e62-93cc-c2f2f1cad117-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.784082 4946 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2ba0b499-56a8-4e62-93cc-c2f2f1cad117-var-lock\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.791043 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ba0b499-56a8-4e62-93cc-c2f2f1cad117-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2ba0b499-56a8-4e62-93cc-c2f2f1cad117" (UID: "2ba0b499-56a8-4e62-93cc-c2f2f1cad117"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.885274 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2ba0b499-56a8-4e62-93cc-c2f2f1cad117-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.904027 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-p65s4" Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.907962 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-p65s4" Dec 04 15:06:40 crc kubenswrapper[4946]: E1204 15:06:40.919367 4946 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="800ms" Dec 04 15:06:40 crc kubenswrapper[4946]: I1204 15:06:40.972579 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-p65s4" Dec 04 15:06:41 crc kubenswrapper[4946]: I1204 15:06:41.356947 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2ba0b499-56a8-4e62-93cc-c2f2f1cad117","Type":"ContainerDied","Data":"fc8f7ca290516d6701e8f16d318288e6d672f516ae3df6920cc2ae9845889079"} Dec 04 15:06:41 crc kubenswrapper[4946]: I1204 15:06:41.356995 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc8f7ca290516d6701e8f16d318288e6d672f516ae3df6920cc2ae9845889079" Dec 04 15:06:41 crc kubenswrapper[4946]: I1204 15:06:41.357242 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 04 15:06:41 crc kubenswrapper[4946]: I1204 15:06:41.396432 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-p65s4" Dec 04 15:06:41 crc kubenswrapper[4946]: I1204 15:06:41.406400 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vrlxj" Dec 04 15:06:41 crc kubenswrapper[4946]: I1204 15:06:41.633601 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kr77j" Dec 04 15:06:41 crc kubenswrapper[4946]: I1204 15:06:41.633654 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kr77j" Dec 04 15:06:41 crc kubenswrapper[4946]: I1204 15:06:41.678486 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kr77j" Dec 04 15:06:41 crc kubenswrapper[4946]: E1204 15:06:41.720769 4946 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="1.6s" Dec 04 15:06:41 crc kubenswrapper[4946]: E1204 15:06:41.961289 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:06:41Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:06:41Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:06:41Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:06:41Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:236f54f606e4761125208f1b925561799b1f1ded3a49853b04d16def380895d1\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:fbd4a05b983bcf97a0a3b9ad5840d8a00b85884db99cd10e7543330982cebf8c\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1610064757},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:001968e5793e539aa226501ec612860cdae08c10cd6945095caea889eb89f3af\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:79851cefc21d43b7712bccc5e4080660672e54f95b923304bac9b749ad8d3f66\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1205303638},{\\\"names\\\":[\\\"r
egistry.redhat.io/redhat/community-operator-index@sha256:9932555c9cde7a4fafbb069ced4e1874d584d0161ef1681710ba649966fa3aed\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:eb617b21d6eac1158f0772431048128991ac63dea611ddc67d9594df748921e7\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1201438029},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:e8990432556acad31519b1a73ec32f32d27c2034cf9e5cc4db8980efc7331594\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:ebe9f523f5c211a3a0f2570331dddcd5be15b12c1fecd9b8b121f881bfaad029\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1129027903},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:41 crc kubenswrapper[4946]: E1204 15:06:41.962714 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:41 crc kubenswrapper[4946]: E1204 15:06:41.963047 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:41 crc kubenswrapper[4946]: E1204 15:06:41.963462 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:41 crc kubenswrapper[4946]: E1204 15:06:41.963629 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:41 crc kubenswrapper[4946]: E1204 15:06:41.963644 4946 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 15:06:42 crc kubenswrapper[4946]: E1204 15:06:42.049356 4946 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-conmon-a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8.scope\": RecentStats: unable to find data in memory cache]" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.164516 4946 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m666j" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.164856 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-m666j" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.222274 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m666j" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.302257 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.303322 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.369466 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.371479 4946 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8" exitCode=0 Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.371582 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.371674 4946 scope.go:117] "RemoveContainer" containerID="99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.396260 4946 scope.go:117] "RemoveContainer" containerID="3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.412973 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.413093 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.413149 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.413557 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.413610 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.413664 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.416079 4946 scope.go:117] "RemoveContainer" containerID="22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.420741 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m666j" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.424486 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kr77j" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.434471 4946 scope.go:117] "RemoveContainer" containerID="a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.453652 4946 scope.go:117] "RemoveContainer" containerID="a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.470977 4946 scope.go:117] "RemoveContainer" containerID="e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.491725 4946 scope.go:117] "RemoveContainer" containerID="99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a" Dec 04 15:06:42 crc kubenswrapper[4946]: E1204 15:06:42.492249 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\": container with ID starting with 99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a not found: ID does not exist" containerID="99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.492285 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a"} err="failed to get container status \"99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\": rpc error: code = NotFound desc = could not find container \"99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a\": container with ID starting with 99596a94c057ebe26d9adf42c3cb87dba1ff6ab01224e796c5460bb13757467a not found: ID does not exist" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.492312 4946 scope.go:117] "RemoveContainer" containerID="3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4" Dec 04 15:06:42 crc kubenswrapper[4946]: E1204 15:06:42.493702 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = 
NotFound desc = could not find container \"3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\": container with ID starting with 3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4 not found: ID does not exist" containerID="3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.493767 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4"} err="failed to get container status \"3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\": rpc error: code = NotFound desc = could not find container \"3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4\": container with ID starting with 3845ef65c5b078cc72be824c635c8ed01323103d389bd93eac317a1f8bcdaaa4 not found: ID does not exist" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.493810 4946 scope.go:117] "RemoveContainer" containerID="22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563" Dec 04 15:06:42 crc kubenswrapper[4946]: E1204 15:06:42.494271 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\": container with ID starting with 22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563 not found: ID does not exist" containerID="22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.494304 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563"} err="failed to get container status \"22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\": rpc error: code = NotFound desc = could not find container \"22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563\": container with ID starting with 22d50d7b7454e0bd5b67ca10d9f9f0b8f78aefe30b7db5b7ec1a5818da237563 not found: ID does not exist" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.494323 4946 scope.go:117] "RemoveContainer" containerID="a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d" Dec 04 15:06:42 crc kubenswrapper[4946]: E1204 15:06:42.495229 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\": container with ID starting with a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d not found: ID does not exist" containerID="a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.495264 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d"} err="failed to get container status \"a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\": rpc error: code = NotFound desc = could not find container \"a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d\": container with ID starting with a512fd2400e2411245177eda6fa587d0221173753b600a9546b8061bf5992f0d not found: ID does not exist" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.495282 4946 scope.go:117] "RemoveContainer" 
containerID="a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8" Dec 04 15:06:42 crc kubenswrapper[4946]: E1204 15:06:42.495884 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\": container with ID starting with a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8 not found: ID does not exist" containerID="a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.495931 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8"} err="failed to get container status \"a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\": rpc error: code = NotFound desc = could not find container \"a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8\": container with ID starting with a109b9d3b46d17ea17baf3038df7f0e12e696085459888b5b1c021ca9a4c44f8 not found: ID does not exist" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.495959 4946 scope.go:117] "RemoveContainer" containerID="e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964" Dec 04 15:06:42 crc kubenswrapper[4946]: E1204 15:06:42.496406 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\": container with ID starting with e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964 not found: ID does not exist" containerID="e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.496437 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964"} err="failed to get container status \"e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\": rpc error: code = NotFound desc = could not find container \"e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964\": container with ID starting with e3b6783ae938ab1ba028f2b6fca90549d28fe58c3cd7bba5813f4e2985350964 not found: ID does not exist" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.515201 4946 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.515286 4946 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:42 crc kubenswrapper[4946]: I1204 15:06:42.515298 4946 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 04 15:06:43 crc kubenswrapper[4946]: E1204 15:06:43.321930 4946 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="3.2s" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.461140 4946 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.883973 4946 status_manager.go:851] "Failed to get status for pod" podUID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" pod="openshift-marketplace/certified-operators-7mzks" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7mzks\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.884780 4946 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.888601 4946 status_manager.go:851] "Failed to get status for pod" podUID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" pod="openshift-marketplace/certified-operators-7mzks" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7mzks\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.889581 4946 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.890062 4946 status_manager.go:851] "Failed to get status for pod" podUID="fec9d9bd-a20b-4625-9070-19949999c206" pod="openshift-marketplace/community-operators-df2mv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-df2mv\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.890434 4946 status_manager.go:851] "Failed to get status for pod" podUID="74c8940c-1ed3-4aaa-94aa-0623f25f008e" pod="openshift-marketplace/redhat-operators-kr77j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-kr77j\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.890799 4946 status_manager.go:851] "Failed to get status for pod" podUID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" pod="openshift-marketplace/redhat-operators-m666j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m666j\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.891033 4946 status_manager.go:851] "Failed to get status for pod" podUID="7a98b449-5e32-4f53-8829-fc2d01b603b8" pod="openshift-marketplace/redhat-marketplace-vrlxj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vrlxj\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.891404 4946 status_manager.go:851] "Failed to get status for pod" podUID="8c9f7504-90bf-4e33-be97-43f5d81896ae" 
pod="openshift-marketplace/certified-operators-8mgfv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-8mgfv\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.891835 4946 status_manager.go:851] "Failed to get status for pod" podUID="9c9bd510-5d62-4814-bd88-62c5a3051f9d" pod="openshift-marketplace/redhat-marketplace-p65s4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-p65s4\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.892179 4946 status_manager.go:851] "Failed to get status for pod" podUID="2ba0b499-56a8-4e62-93cc-c2f2f1cad117" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.892776 4946 status_manager.go:851] "Failed to get status for pod" podUID="fec9d9bd-a20b-4625-9070-19949999c206" pod="openshift-marketplace/community-operators-df2mv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-df2mv\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.893081 4946 status_manager.go:851] "Failed to get status for pod" podUID="74c8940c-1ed3-4aaa-94aa-0623f25f008e" pod="openshift-marketplace/redhat-operators-kr77j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-kr77j\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.893407 4946 status_manager.go:851] "Failed to get status for pod" podUID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" pod="openshift-marketplace/redhat-operators-m666j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m666j\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.900571 4946 status_manager.go:851] "Failed to get status for pod" podUID="7a98b449-5e32-4f53-8829-fc2d01b603b8" pod="openshift-marketplace/redhat-marketplace-vrlxj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vrlxj\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.902816 4946 status_manager.go:851] "Failed to get status for pod" podUID="8c9f7504-90bf-4e33-be97-43f5d81896ae" pod="openshift-marketplace/certified-operators-8mgfv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-8mgfv\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.903366 4946 status_manager.go:851] "Failed to get status for pod" podUID="9c9bd510-5d62-4814-bd88-62c5a3051f9d" pod="openshift-marketplace/redhat-marketplace-p65s4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-p65s4\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.904035 4946 status_manager.go:851] "Failed to get status for pod" podUID="2ba0b499-56a8-4e62-93cc-c2f2f1cad117" 
pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.904733 4946 status_manager.go:851] "Failed to get status for pod" podUID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" pod="openshift-marketplace/certified-operators-7mzks" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7mzks\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:43 crc kubenswrapper[4946]: I1204 15:06:43.905460 4946 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:46 crc kubenswrapper[4946]: E1204 15:06:46.523894 4946 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="6.4s" Dec 04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.025777 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8mgfv" Dec 04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.027188 4946 status_manager.go:851] "Failed to get status for pod" podUID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" pod="openshift-marketplace/certified-operators-7mzks" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7mzks\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.027726 4946 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.028463 4946 status_manager.go:851] "Failed to get status for pod" podUID="fec9d9bd-a20b-4625-9070-19949999c206" pod="openshift-marketplace/community-operators-df2mv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-df2mv\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.029167 4946 status_manager.go:851] "Failed to get status for pod" podUID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" pod="openshift-marketplace/redhat-operators-m666j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m666j\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.029536 4946 status_manager.go:851] "Failed to get status for pod" podUID="74c8940c-1ed3-4aaa-94aa-0623f25f008e" pod="openshift-marketplace/redhat-operators-kr77j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-kr77j\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 
04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.029914 4946 status_manager.go:851] "Failed to get status for pod" podUID="7a98b449-5e32-4f53-8829-fc2d01b603b8" pod="openshift-marketplace/redhat-marketplace-vrlxj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vrlxj\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.030367 4946 status_manager.go:851] "Failed to get status for pod" podUID="8c9f7504-90bf-4e33-be97-43f5d81896ae" pod="openshift-marketplace/certified-operators-8mgfv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-8mgfv\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.030785 4946 status_manager.go:851] "Failed to get status for pod" podUID="9c9bd510-5d62-4814-bd88-62c5a3051f9d" pod="openshift-marketplace/redhat-marketplace-p65s4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-p65s4\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.031195 4946 status_manager.go:851] "Failed to get status for pod" podUID="2ba0b499-56a8-4e62-93cc-c2f2f1cad117" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:49 crc kubenswrapper[4946]: E1204 15:06:49.061881 4946 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.220:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e0b8bd56e9de9 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-04 15:06:39.184969193 +0000 UTC m=+250.071012834,LastTimestamp:2025-12-04 15:06:39.184969193 +0000 UTC m=+250.071012834,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.456756 4946 status_manager.go:851] "Failed to get status for pod" podUID="2ba0b499-56a8-4e62-93cc-c2f2f1cad117" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.457424 4946 status_manager.go:851] "Failed to get status for pod" podUID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" pod="openshift-marketplace/certified-operators-7mzks" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7mzks\": dial tcp 38.102.83.220:6443: connect: connection 
refused" Dec 04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.457865 4946 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.458448 4946 status_manager.go:851] "Failed to get status for pod" podUID="fec9d9bd-a20b-4625-9070-19949999c206" pod="openshift-marketplace/community-operators-df2mv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-df2mv\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.458714 4946 status_manager.go:851] "Failed to get status for pod" podUID="74c8940c-1ed3-4aaa-94aa-0623f25f008e" pod="openshift-marketplace/redhat-operators-kr77j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-kr77j\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.458995 4946 status_manager.go:851] "Failed to get status for pod" podUID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" pod="openshift-marketplace/redhat-operators-m666j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m666j\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.459347 4946 status_manager.go:851] "Failed to get status for pod" podUID="7a98b449-5e32-4f53-8829-fc2d01b603b8" pod="openshift-marketplace/redhat-marketplace-vrlxj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vrlxj\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.459718 4946 status_manager.go:851] "Failed to get status for pod" podUID="8c9f7504-90bf-4e33-be97-43f5d81896ae" pod="openshift-marketplace/certified-operators-8mgfv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-8mgfv\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:49 crc kubenswrapper[4946]: I1204 15:06:49.460043 4946 status_manager.go:851] "Failed to get status for pod" podUID="9c9bd510-5d62-4814-bd88-62c5a3051f9d" pod="openshift-marketplace/redhat-marketplace-p65s4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-p65s4\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:50 crc kubenswrapper[4946]: I1204 15:06:50.452693 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:50 crc kubenswrapper[4946]: I1204 15:06:50.453423 4946 status_manager.go:851] "Failed to get status for pod" podUID="fec9d9bd-a20b-4625-9070-19949999c206" pod="openshift-marketplace/community-operators-df2mv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-df2mv\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:50 crc kubenswrapper[4946]: I1204 15:06:50.454014 4946 status_manager.go:851] "Failed to get status for pod" podUID="74c8940c-1ed3-4aaa-94aa-0623f25f008e" pod="openshift-marketplace/redhat-operators-kr77j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-kr77j\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:50 crc kubenswrapper[4946]: I1204 15:06:50.454363 4946 status_manager.go:851] "Failed to get status for pod" podUID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" pod="openshift-marketplace/redhat-operators-m666j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m666j\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:50 crc kubenswrapper[4946]: I1204 15:06:50.454646 4946 status_manager.go:851] "Failed to get status for pod" podUID="7a98b449-5e32-4f53-8829-fc2d01b603b8" pod="openshift-marketplace/redhat-marketplace-vrlxj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vrlxj\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:50 crc kubenswrapper[4946]: I1204 15:06:50.455239 4946 status_manager.go:851] "Failed to get status for pod" podUID="8c9f7504-90bf-4e33-be97-43f5d81896ae" pod="openshift-marketplace/certified-operators-8mgfv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-8mgfv\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:50 crc kubenswrapper[4946]: I1204 15:06:50.455657 4946 status_manager.go:851] "Failed to get status for pod" podUID="9c9bd510-5d62-4814-bd88-62c5a3051f9d" pod="openshift-marketplace/redhat-marketplace-p65s4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-p65s4\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:50 crc kubenswrapper[4946]: I1204 15:06:50.455926 4946 status_manager.go:851] "Failed to get status for pod" podUID="2ba0b499-56a8-4e62-93cc-c2f2f1cad117" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:50 crc kubenswrapper[4946]: I1204 15:06:50.456234 4946 status_manager.go:851] "Failed to get status for pod" podUID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" pod="openshift-marketplace/certified-operators-7mzks" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7mzks\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:50 crc kubenswrapper[4946]: I1204 15:06:50.456521 4946 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:50 crc kubenswrapper[4946]: I1204 15:06:50.469809 4946 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5b7f0724-c253-4acc-8909-ca111112af4e" Dec 04 15:06:50 crc kubenswrapper[4946]: I1204 15:06:50.469858 4946 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5b7f0724-c253-4acc-8909-ca111112af4e" Dec 04 15:06:50 crc kubenswrapper[4946]: E1204 15:06:50.470317 4946 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:50 crc kubenswrapper[4946]: I1204 15:06:50.470896 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:50 crc kubenswrapper[4946]: W1204 15:06:50.494034 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-b654c4a1f8234ef70c634e321133e0b13953a1dcfae7c61b030a35c09eb58031 WatchSource:0}: Error finding container b654c4a1f8234ef70c634e321133e0b13953a1dcfae7c61b030a35c09eb58031: Status 404 returned error can't find the container with id b654c4a1f8234ef70c634e321133e0b13953a1dcfae7c61b030a35c09eb58031 Dec 04 15:06:51 crc kubenswrapper[4946]: I1204 15:06:51.423584 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b654c4a1f8234ef70c634e321133e0b13953a1dcfae7c61b030a35c09eb58031"} Dec 04 15:06:52 crc kubenswrapper[4946]: E1204 15:06:52.207834 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:06:52Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:06:52Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:06:52Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T15:06:52Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:236f54f606e4761125208f1b925561799b1f1ded3a49853b04d16def380895d1\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:fbd4a05b983bcf97a0a3b9ad5840d8a00b85884db99cd10e7543330982cebf8c\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1610064757},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:001968e5793e539aa226501ec612860cdae08c10cd6945095caea889eb89f3af\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:79851cefc21d43b7712bccc5e4080660672e54f95b923304bac9b749ad8d3f66\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1205303638},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:9932555c9cde7a4fafbb069ced4e1874d584d0161ef1681710ba649966fa3aed\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:eb617b21d6eac1158f0772431048128991ac63dea611ddc67d9594df748921e7\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1201438029},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:e8990432556acad31519b1a73ec32f32d27c2034cf9e5cc4db8980efc7331594\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:ebe9f523f5c211a3a0f2570331dddcd5be15b12c1fecd9b8b121f881bfaad029\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1129027903},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\
\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:52 crc kubenswrapper[4946]: E1204 15:06:52.208753 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get 
\"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:52 crc kubenswrapper[4946]: E1204 15:06:52.209190 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:52 crc kubenswrapper[4946]: E1204 15:06:52.209452 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:52 crc kubenswrapper[4946]: E1204 15:06:52.209714 4946 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:52 crc kubenswrapper[4946]: E1204 15:06:52.209735 4946 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 15:06:52 crc kubenswrapper[4946]: I1204 15:06:52.433148 4946 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="99f30b33910b90cfb85a97b006b567c3b10aef58147158c7c6bbff6e8acc73bd" exitCode=0 Dec 04 15:06:52 crc kubenswrapper[4946]: I1204 15:06:52.433222 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"99f30b33910b90cfb85a97b006b567c3b10aef58147158c7c6bbff6e8acc73bd"} Dec 04 15:06:52 crc kubenswrapper[4946]: I1204 15:06:52.433480 4946 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5b7f0724-c253-4acc-8909-ca111112af4e" Dec 04 15:06:52 crc kubenswrapper[4946]: I1204 15:06:52.433507 4946 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5b7f0724-c253-4acc-8909-ca111112af4e" Dec 04 15:06:52 crc kubenswrapper[4946]: E1204 15:06:52.434061 4946 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:52 crc kubenswrapper[4946]: I1204 15:06:52.434314 4946 status_manager.go:851] "Failed to get status for pod" podUID="fec9d9bd-a20b-4625-9070-19949999c206" pod="openshift-marketplace/community-operators-df2mv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-df2mv\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:52 crc kubenswrapper[4946]: I1204 15:06:52.434722 4946 status_manager.go:851] "Failed to get status for pod" podUID="74c8940c-1ed3-4aaa-94aa-0623f25f008e" pod="openshift-marketplace/redhat-operators-kr77j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-kr77j\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:52 crc kubenswrapper[4946]: I1204 15:06:52.435041 4946 status_manager.go:851] "Failed to get status for pod" podUID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" pod="openshift-marketplace/redhat-operators-m666j" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m666j\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:52 crc kubenswrapper[4946]: I1204 15:06:52.435286 4946 status_manager.go:851] "Failed to get status for pod" podUID="7a98b449-5e32-4f53-8829-fc2d01b603b8" pod="openshift-marketplace/redhat-marketplace-vrlxj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vrlxj\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:52 crc kubenswrapper[4946]: I1204 15:06:52.435585 4946 status_manager.go:851] "Failed to get status for pod" podUID="8c9f7504-90bf-4e33-be97-43f5d81896ae" pod="openshift-marketplace/certified-operators-8mgfv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-8mgfv\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:52 crc kubenswrapper[4946]: I1204 15:06:52.435965 4946 status_manager.go:851] "Failed to get status for pod" podUID="9c9bd510-5d62-4814-bd88-62c5a3051f9d" pod="openshift-marketplace/redhat-marketplace-p65s4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-p65s4\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:52 crc kubenswrapper[4946]: I1204 15:06:52.436284 4946 status_manager.go:851] "Failed to get status for pod" podUID="2ba0b499-56a8-4e62-93cc-c2f2f1cad117" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:52 crc kubenswrapper[4946]: I1204 15:06:52.436596 4946 status_manager.go:851] "Failed to get status for pod" podUID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" pod="openshift-marketplace/certified-operators-7mzks" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7mzks\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:52 crc kubenswrapper[4946]: I1204 15:06:52.436880 4946 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Dec 04 15:06:53 crc kubenswrapper[4946]: I1204 15:06:53.444660 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0c7df72de2667afffbc45c0c16fc648bbc0cd94ce167840203490a02a154903f"} Dec 04 15:06:53 crc kubenswrapper[4946]: I1204 15:06:53.445215 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"2db026a24eab70d0ab86037d9a7c807d7c90ac5660c75c89988356a10a7be427"} Dec 04 15:06:53 crc kubenswrapper[4946]: I1204 15:06:53.445232 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"18098c38c9916011d0303132915ff0efdae069ddf017f38613964bdd3d16e794"} Dec 04 15:06:53 crc kubenswrapper[4946]: I1204 
15:06:53.445245 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"92a40a51dd3817177d49697e9db8b12ed203ec2a07df7da8123f323ad83ac945"} Dec 04 15:06:54 crc kubenswrapper[4946]: I1204 15:06:54.453699 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 04 15:06:54 crc kubenswrapper[4946]: I1204 15:06:54.454537 4946 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f" exitCode=1 Dec 04 15:06:54 crc kubenswrapper[4946]: I1204 15:06:54.454636 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f"} Dec 04 15:06:54 crc kubenswrapper[4946]: I1204 15:06:54.455333 4946 scope.go:117] "RemoveContainer" containerID="5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f" Dec 04 15:06:54 crc kubenswrapper[4946]: I1204 15:06:54.460437 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"82875363982d99a21eb55d5a13494ef85dcbe337fa9e23424381548954b58c53"} Dec 04 15:06:54 crc kubenswrapper[4946]: I1204 15:06:54.460664 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:54 crc kubenswrapper[4946]: I1204 15:06:54.460757 4946 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5b7f0724-c253-4acc-8909-ca111112af4e" Dec 04 15:06:54 crc kubenswrapper[4946]: I1204 15:06:54.460794 4946 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5b7f0724-c253-4acc-8909-ca111112af4e" Dec 04 15:06:55 crc kubenswrapper[4946]: I1204 15:06:55.468648 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 04 15:06:55 crc kubenswrapper[4946]: I1204 15:06:55.471014 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"dea2988fe3b8f6ecb0043335a55301daecdb9c94aa4d61a902f28bbfb7f12aed"} Dec 04 15:06:55 crc kubenswrapper[4946]: I1204 15:06:55.471425 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:55 crc kubenswrapper[4946]: I1204 15:06:55.471531 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:55 crc kubenswrapper[4946]: I1204 15:06:55.477182 4946 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]log ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]etcd ok Dec 04 
15:06:55 crc kubenswrapper[4946]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/openshift.io-api-request-count-filter ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/openshift.io-startkubeinformers ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/generic-apiserver-start-informers ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/priority-and-fairness-config-consumer ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/priority-and-fairness-filter ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/start-apiextensions-informers ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/start-apiextensions-controllers ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/crd-informer-synced ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/start-system-namespaces-controller ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/start-cluster-authentication-info-controller ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/start-legacy-token-tracking-controller ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/start-service-ip-repair-controllers ok Dec 04 15:06:55 crc kubenswrapper[4946]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/scheduling/bootstrap-system-priority-classes ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/priority-and-fairness-config-producer ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/bootstrap-controller ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/aggregator-reload-proxy-client-cert ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/start-kube-aggregator-informers ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/apiservice-status-local-available-controller ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/apiservice-status-remote-available-controller ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/apiservice-registration-controller ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/apiservice-wait-for-first-sync ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/apiservice-discovery-controller ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/kube-apiserver-autoregistration ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]autoregister-completion ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/apiservice-openapi-controller ok Dec 04 15:06:55 crc kubenswrapper[4946]: [+]poststarthook/apiservice-openapiv3-controller ok Dec 04 15:06:55 crc kubenswrapper[4946]: livez check failed Dec 04 15:06:55 crc kubenswrapper[4946]: I1204 15:06:55.477233 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" 
containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 15:06:55 crc kubenswrapper[4946]: I1204 15:06:55.924674 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:06:59 crc kubenswrapper[4946]: I1204 15:06:59.475391 4946 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:06:59 crc kubenswrapper[4946]: I1204 15:06:59.499261 4946 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5b7f0724-c253-4acc-8909-ca111112af4e" Dec 04 15:06:59 crc kubenswrapper[4946]: I1204 15:06:59.499302 4946 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5b7f0724-c253-4acc-8909-ca111112af4e" Dec 04 15:06:59 crc kubenswrapper[4946]: I1204 15:06:59.727109 4946 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="0e6f60e8-20e1-44e8-a7d1-c4aa24e7896c" Dec 04 15:07:03 crc kubenswrapper[4946]: I1204 15:07:03.575253 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:07:03 crc kubenswrapper[4946]: I1204 15:07:03.575731 4946 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 04 15:07:03 crc kubenswrapper[4946]: I1204 15:07:03.576043 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 04 15:07:09 crc kubenswrapper[4946]: I1204 15:07:09.369737 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 04 15:07:09 crc kubenswrapper[4946]: I1204 15:07:09.721973 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 04 15:07:09 crc kubenswrapper[4946]: I1204 15:07:09.731025 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 04 15:07:09 crc kubenswrapper[4946]: I1204 15:07:09.983787 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 04 15:07:10 crc kubenswrapper[4946]: I1204 15:07:10.022671 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 04 15:07:10 crc kubenswrapper[4946]: I1204 15:07:10.683479 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 04 15:07:10 crc kubenswrapper[4946]: I1204 15:07:10.911837 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 04 15:07:10 crc kubenswrapper[4946]: I1204 
15:07:10.952542 4946 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 04 15:07:10 crc kubenswrapper[4946]: I1204 15:07:10.978503 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 04 15:07:11 crc kubenswrapper[4946]: I1204 15:07:11.348464 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 04 15:07:11 crc kubenswrapper[4946]: I1204 15:07:11.402348 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 04 15:07:11 crc kubenswrapper[4946]: I1204 15:07:11.556231 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 04 15:07:11 crc kubenswrapper[4946]: I1204 15:07:11.626551 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 04 15:07:11 crc kubenswrapper[4946]: I1204 15:07:11.683308 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 04 15:07:11 crc kubenswrapper[4946]: I1204 15:07:11.754313 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 04 15:07:12 crc kubenswrapper[4946]: I1204 15:07:12.025586 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 04 15:07:12 crc kubenswrapper[4946]: I1204 15:07:12.097183 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 04 15:07:12 crc kubenswrapper[4946]: I1204 15:07:12.223953 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 04 15:07:12 crc kubenswrapper[4946]: I1204 15:07:12.237340 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 04 15:07:12 crc kubenswrapper[4946]: I1204 15:07:12.251174 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 04 15:07:12 crc kubenswrapper[4946]: I1204 15:07:12.381447 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 04 15:07:12 crc kubenswrapper[4946]: I1204 15:07:12.428771 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 04 15:07:12 crc kubenswrapper[4946]: I1204 15:07:12.524939 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 04 15:07:12 crc kubenswrapper[4946]: I1204 15:07:12.839867 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 04 15:07:12 crc kubenswrapper[4946]: I1204 15:07:12.852329 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 04 15:07:12 crc kubenswrapper[4946]: I1204 15:07:12.857209 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.050175 4946 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.074924 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.150780 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.168486 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.188791 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.281255 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.326027 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.345915 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.353190 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.364828 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.386464 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.444820 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.454664 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.470418 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.497779 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.524832 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.554219 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.576205 4946 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.576277 4946 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.596262 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.614052 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.629178 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.754231 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.804442 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.807342 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.832755 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.852668 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.891745 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.891903 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 04 15:07:13 crc kubenswrapper[4946]: I1204 15:07:13.924424 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 04 15:07:14 crc kubenswrapper[4946]: I1204 15:07:14.185759 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 04 15:07:14 crc kubenswrapper[4946]: I1204 15:07:14.276939 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 04 15:07:14 crc kubenswrapper[4946]: I1204 15:07:14.286535 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 04 15:07:14 crc kubenswrapper[4946]: I1204 15:07:14.298327 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 04 15:07:14 crc kubenswrapper[4946]: I1204 15:07:14.420915 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 04 15:07:14 crc kubenswrapper[4946]: I1204 15:07:14.499259 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 04 15:07:14 crc kubenswrapper[4946]: I1204 15:07:14.603363 4946 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 04 15:07:14 crc kubenswrapper[4946]: I1204 15:07:14.646599 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 04 15:07:14 crc kubenswrapper[4946]: I1204 15:07:14.683947 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 04 15:07:14 crc kubenswrapper[4946]: I1204 15:07:14.896619 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 04 15:07:14 crc kubenswrapper[4946]: I1204 15:07:14.962425 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 04 15:07:15 crc kubenswrapper[4946]: I1204 15:07:15.014465 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 04 15:07:15 crc kubenswrapper[4946]: I1204 15:07:15.075358 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 04 15:07:15 crc kubenswrapper[4946]: I1204 15:07:15.185786 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 04 15:07:15 crc kubenswrapper[4946]: I1204 15:07:15.276381 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 04 15:07:15 crc kubenswrapper[4946]: I1204 15:07:15.287675 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 04 15:07:15 crc kubenswrapper[4946]: I1204 15:07:15.385940 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 04 15:07:15 crc kubenswrapper[4946]: I1204 15:07:15.493057 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 04 15:07:15 crc kubenswrapper[4946]: I1204 15:07:15.548290 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 04 15:07:15 crc kubenswrapper[4946]: I1204 15:07:15.550752 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 04 15:07:15 crc kubenswrapper[4946]: I1204 15:07:15.577781 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 04 15:07:15 crc kubenswrapper[4946]: I1204 15:07:15.662012 4946 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 04 15:07:15 crc kubenswrapper[4946]: I1204 15:07:15.665472 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 04 15:07:15 crc kubenswrapper[4946]: I1204 15:07:15.712723 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 04 15:07:15 crc kubenswrapper[4946]: I1204 15:07:15.753838 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 04 15:07:15 crc 
kubenswrapper[4946]: I1204 15:07:15.792141 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 04 15:07:15 crc kubenswrapper[4946]: I1204 15:07:15.835762 4946 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 04 15:07:15 crc kubenswrapper[4946]: I1204 15:07:15.845914 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 04 15:07:15 crc kubenswrapper[4946]: I1204 15:07:15.888869 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.122032 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.243237 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.280030 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.285368 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.295490 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.325130 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.383407 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.391524 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.393412 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.428267 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.545557 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.553786 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.586669 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.603988 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.663154 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.669155 4946 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.724284 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.812896 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.846958 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 04 15:07:16 crc kubenswrapper[4946]: I1204 15:07:16.850441 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 04 15:07:17 crc kubenswrapper[4946]: I1204 15:07:17.104958 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 04 15:07:17 crc kubenswrapper[4946]: I1204 15:07:17.187617 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 04 15:07:17 crc kubenswrapper[4946]: I1204 15:07:17.234600 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 04 15:07:17 crc kubenswrapper[4946]: I1204 15:07:17.312788 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 04 15:07:17 crc kubenswrapper[4946]: I1204 15:07:17.320442 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 04 15:07:17 crc kubenswrapper[4946]: I1204 15:07:17.355243 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 04 15:07:17 crc kubenswrapper[4946]: I1204 15:07:17.421363 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 04 15:07:17 crc kubenswrapper[4946]: I1204 15:07:17.438053 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 04 15:07:17 crc kubenswrapper[4946]: I1204 15:07:17.448412 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 04 15:07:17 crc kubenswrapper[4946]: I1204 15:07:17.472263 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 04 15:07:17 crc kubenswrapper[4946]: I1204 15:07:17.524632 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 04 15:07:17 crc kubenswrapper[4946]: I1204 15:07:17.816253 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 04 15:07:17 crc kubenswrapper[4946]: I1204 15:07:17.872471 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 04 15:07:17 crc kubenswrapper[4946]: I1204 15:07:17.886909 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 04 15:07:17 crc kubenswrapper[4946]: I1204 15:07:17.889354 4946 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-multus"/"openshift-service-ca.crt" Dec 04 15:07:17 crc kubenswrapper[4946]: I1204 15:07:17.949571 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 04 15:07:17 crc kubenswrapper[4946]: I1204 15:07:17.962193 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.000282 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.033382 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.138167 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.145395 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.174499 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.219308 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.227096 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.241558 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.383195 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.403511 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.444338 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.546321 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.550387 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.571438 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.651693 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.662752 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.913670 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 04 15:07:18 crc 
kubenswrapper[4946]: I1204 15:07:18.916867 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.927083 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 04 15:07:18 crc kubenswrapper[4946]: I1204 15:07:18.950244 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.007218 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.027513 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.047911 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.119031 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.145388 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.163890 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.176554 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.181094 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.196957 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.214958 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.225678 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.243224 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.272510 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.298470 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.312264 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.316850 4946 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.358921 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.376691 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.411693 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.545628 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.621312 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.666059 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.697292 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.769529 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 04 15:07:19 crc kubenswrapper[4946]: I1204 15:07:19.785259 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.041663 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.120763 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.142306 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.143373 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.169579 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.206586 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.215630 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.288809 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.348537 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.385140 4946 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-apiserver"/"etcd-client" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.454461 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.513369 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.634255 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.704413 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.725995 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.727705 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.738293 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.785013 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.800703 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.820313 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.832304 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.835352 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.906421 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.946895 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 04 15:07:20 crc kubenswrapper[4946]: I1204 15:07:20.989406 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.045622 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.057755 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.092726 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.189361 4946 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.295586 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.303350 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.315352 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.379904 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.387779 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.401998 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.434364 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.462400 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.531264 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.534469 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.770253 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.808623 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.809002 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.895750 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.913682 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.914170 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 04 15:07:21 crc kubenswrapper[4946]: I1204 15:07:21.965837 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 04 15:07:22 crc kubenswrapper[4946]: I1204 15:07:22.034764 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 04 
15:07:22 crc kubenswrapper[4946]: I1204 15:07:22.046858 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 04 15:07:22 crc kubenswrapper[4946]: I1204 15:07:22.148244 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 04 15:07:22 crc kubenswrapper[4946]: I1204 15:07:22.224746 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 04 15:07:22 crc kubenswrapper[4946]: I1204 15:07:22.232972 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 04 15:07:22 crc kubenswrapper[4946]: I1204 15:07:22.260137 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 04 15:07:22 crc kubenswrapper[4946]: I1204 15:07:22.475362 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 04 15:07:22 crc kubenswrapper[4946]: I1204 15:07:22.633838 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 04 15:07:22 crc kubenswrapper[4946]: I1204 15:07:22.667254 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 04 15:07:22 crc kubenswrapper[4946]: I1204 15:07:22.721647 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 04 15:07:22 crc kubenswrapper[4946]: I1204 15:07:22.730812 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 04 15:07:22 crc kubenswrapper[4946]: I1204 15:07:22.731902 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 04 15:07:22 crc kubenswrapper[4946]: I1204 15:07:22.848743 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.006553 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.153856 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.171800 4946 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.198349 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.209326 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.377396 4946 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.381571 4946 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=45.381555221 podStartE2EDuration="45.381555221s" podCreationTimestamp="2025-12-04 15:06:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:06:59.523877877 +0000 UTC m=+270.409921518" watchObservedRunningTime="2025-12-04 15:07:23.381555221 +0000 UTC m=+294.267598862" Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.381945 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.381993 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.390658 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.407348 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=24.407326008 podStartE2EDuration="24.407326008s" podCreationTimestamp="2025-12-04 15:06:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:07:23.402808516 +0000 UTC m=+294.288852157" watchObservedRunningTime="2025-12-04 15:07:23.407326008 +0000 UTC m=+294.293369659" Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.416651 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.576285 4946 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.576388 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.576476 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.577558 4946 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"dea2988fe3b8f6ecb0043335a55301daecdb9c94aa4d61a902f28bbfb7f12aed"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.577807 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://dea2988fe3b8f6ecb0043335a55301daecdb9c94aa4d61a902f28bbfb7f12aed" 
gracePeriod=30 Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.580052 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.592677 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.652894 4946 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.788489 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 04 15:07:23 crc kubenswrapper[4946]: I1204 15:07:23.964906 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 04 15:07:24 crc kubenswrapper[4946]: I1204 15:07:24.309872 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 04 15:07:24 crc kubenswrapper[4946]: I1204 15:07:24.369067 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 04 15:07:24 crc kubenswrapper[4946]: I1204 15:07:24.369593 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 04 15:07:24 crc kubenswrapper[4946]: I1204 15:07:24.470520 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 04 15:07:24 crc kubenswrapper[4946]: I1204 15:07:24.616952 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 04 15:07:24 crc kubenswrapper[4946]: I1204 15:07:24.693009 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 04 15:07:24 crc kubenswrapper[4946]: I1204 15:07:24.829524 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 04 15:07:25 crc kubenswrapper[4946]: I1204 15:07:25.023526 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 04 15:07:25 crc kubenswrapper[4946]: I1204 15:07:25.107111 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 04 15:07:25 crc kubenswrapper[4946]: I1204 15:07:25.262456 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 04 15:07:25 crc kubenswrapper[4946]: I1204 15:07:25.268988 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 04 15:07:25 crc kubenswrapper[4946]: I1204 15:07:25.475367 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:07:25 crc kubenswrapper[4946]: I1204 15:07:25.480457 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 15:07:25 crc kubenswrapper[4946]: I1204 15:07:25.524444 4946 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ingress"/"router-metrics-certs-default" Dec 04 15:07:26 crc kubenswrapper[4946]: I1204 15:07:26.062270 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 04 15:07:26 crc kubenswrapper[4946]: I1204 15:07:26.401507 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 04 15:07:26 crc kubenswrapper[4946]: I1204 15:07:26.433366 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 04 15:07:27 crc kubenswrapper[4946]: I1204 15:07:27.051187 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 04 15:07:29 crc kubenswrapper[4946]: I1204 15:07:29.300683 4946 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 04 15:07:33 crc kubenswrapper[4946]: I1204 15:07:33.345009 4946 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 04 15:07:33 crc kubenswrapper[4946]: I1204 15:07:33.345992 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://b6e0b8df59a29e319a03bfb1f0daf5a44cd07c8a86aa66d04139f437ae05963f" gracePeriod=5 Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.273975 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7mzks"] Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.274612 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7mzks" podUID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" containerName="registry-server" containerID="cri-o://e992c766db9d9d20751838d21dac482aa691c4f5bd717b1a7a4ebe0bf98b7d39" gracePeriod=30 Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.307739 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8mgfv"] Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.308202 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8mgfv" podUID="8c9f7504-90bf-4e33-be97-43f5d81896ae" containerName="registry-server" containerID="cri-o://f13e1ed4d91b07976e087c3006e87b65ea0cb094f53b49799c938467c66bca8f" gracePeriod=30 Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.315934 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-df2mv"] Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.316262 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-df2mv" podUID="fec9d9bd-a20b-4625-9070-19949999c206" containerName="registry-server" containerID="cri-o://8ff5d0de19a536206b38267bfa2e1cf79b4b48c7f8f8c48050e119d0b8e3d59a" gracePeriod=30 Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.326337 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-z4t8l"] Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.326554 4946 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" podUID="0b12867e-de02-4b45-ac09-5140aab7451e" containerName="marketplace-operator" containerID="cri-o://dd5749d0eba0fadc137ed3019b517bc7f496580ddde20187785b7ceef42904cf" gracePeriod=30 Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.334518 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p65s4"] Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.335457 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-p65s4" podUID="9c9bd510-5d62-4814-bd88-62c5a3051f9d" containerName="registry-server" containerID="cri-o://417a02e48c8d926d32b45be48cbe68d8ee5db50498770d81bfe1c02efa84c7e7" gracePeriod=30 Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.340970 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrlxj"] Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.341207 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vrlxj" podUID="7a98b449-5e32-4f53-8829-fc2d01b603b8" containerName="registry-server" containerID="cri-o://04c5e1e86ce28602ac8333d7f6be01a97007faa7102cee140fae2de6453a9b26" gracePeriod=30 Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.346227 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kr77j"] Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.346444 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kr77j" podUID="74c8940c-1ed3-4aaa-94aa-0623f25f008e" containerName="registry-server" containerID="cri-o://8a1eaff474378f6984f2824129d2075fcd592707a9632c8751552b805e03a97a" gracePeriod=30 Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.351696 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m666j"] Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.352054 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m666j" podUID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" containerName="registry-server" containerID="cri-o://6dd2e8d46022b0e9908a3b1243d4d720615730d5c0386bf24fb4257abb30ace3" gracePeriod=30 Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.786590 4946 generic.go:334] "Generic (PLEG): container finished" podID="fec9d9bd-a20b-4625-9070-19949999c206" containerID="8ff5d0de19a536206b38267bfa2e1cf79b4b48c7f8f8c48050e119d0b8e3d59a" exitCode=0 Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.787004 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-df2mv" event={"ID":"fec9d9bd-a20b-4625-9070-19949999c206","Type":"ContainerDied","Data":"8ff5d0de19a536206b38267bfa2e1cf79b4b48c7f8f8c48050e119d0b8e3d59a"} Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.787370 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-df2mv" event={"ID":"fec9d9bd-a20b-4625-9070-19949999c206","Type":"ContainerDied","Data":"464c187576d5d7a083a8305483aed39b775960980b681b0f6b7ccb91bae11042"} Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.787408 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="464c187576d5d7a083a8305483aed39b775960980b681b0f6b7ccb91bae11042" Dec 04 15:07:34 crc 
kubenswrapper[4946]: I1204 15:07:34.799976 4946 generic.go:334] "Generic (PLEG): container finished" podID="9c9bd510-5d62-4814-bd88-62c5a3051f9d" containerID="417a02e48c8d926d32b45be48cbe68d8ee5db50498770d81bfe1c02efa84c7e7" exitCode=0 Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.800076 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p65s4" event={"ID":"9c9bd510-5d62-4814-bd88-62c5a3051f9d","Type":"ContainerDied","Data":"417a02e48c8d926d32b45be48cbe68d8ee5db50498770d81bfe1c02efa84c7e7"} Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.804338 4946 generic.go:334] "Generic (PLEG): container finished" podID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" containerID="e992c766db9d9d20751838d21dac482aa691c4f5bd717b1a7a4ebe0bf98b7d39" exitCode=0 Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.804419 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mzks" event={"ID":"73b58e95-46d5-468b-9890-a4fc3c5a0bde","Type":"ContainerDied","Data":"e992c766db9d9d20751838d21dac482aa691c4f5bd717b1a7a4ebe0bf98b7d39"} Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.804456 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mzks" event={"ID":"73b58e95-46d5-468b-9890-a4fc3c5a0bde","Type":"ContainerDied","Data":"5c906136ec926c0b895114e8943d633858caae888c00d50dd48d8ec2ed22cf2c"} Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.804471 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5c906136ec926c0b895114e8943d633858caae888c00d50dd48d8ec2ed22cf2c" Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.806191 4946 generic.go:334] "Generic (PLEG): container finished" podID="74c8940c-1ed3-4aaa-94aa-0623f25f008e" containerID="8a1eaff474378f6984f2824129d2075fcd592707a9632c8751552b805e03a97a" exitCode=0 Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.806254 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kr77j" event={"ID":"74c8940c-1ed3-4aaa-94aa-0623f25f008e","Type":"ContainerDied","Data":"8a1eaff474378f6984f2824129d2075fcd592707a9632c8751552b805e03a97a"} Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.806344 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kr77j" event={"ID":"74c8940c-1ed3-4aaa-94aa-0623f25f008e","Type":"ContainerDied","Data":"049ce5875fd860851cfbb3f16e41aaa5a46080273d386b5f20bd2afaa03c1f8a"} Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.806365 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="049ce5875fd860851cfbb3f16e41aaa5a46080273d386b5f20bd2afaa03c1f8a" Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.809423 4946 generic.go:334] "Generic (PLEG): container finished" podID="7a98b449-5e32-4f53-8829-fc2d01b603b8" containerID="04c5e1e86ce28602ac8333d7f6be01a97007faa7102cee140fae2de6453a9b26" exitCode=0 Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.809467 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrlxj" event={"ID":"7a98b449-5e32-4f53-8829-fc2d01b603b8","Type":"ContainerDied","Data":"04c5e1e86ce28602ac8333d7f6be01a97007faa7102cee140fae2de6453a9b26"} Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.811503 4946 generic.go:334] "Generic (PLEG): container finished" podID="0b12867e-de02-4b45-ac09-5140aab7451e" 
containerID="dd5749d0eba0fadc137ed3019b517bc7f496580ddde20187785b7ceef42904cf" exitCode=0 Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.811644 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" event={"ID":"0b12867e-de02-4b45-ac09-5140aab7451e","Type":"ContainerDied","Data":"dd5749d0eba0fadc137ed3019b517bc7f496580ddde20187785b7ceef42904cf"} Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.811737 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" event={"ID":"0b12867e-de02-4b45-ac09-5140aab7451e","Type":"ContainerDied","Data":"54e9595356168641523c2224b47f7c5a365f36d1a7df4038eef9589779fdd0cf"} Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.811768 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54e9595356168641523c2224b47f7c5a365f36d1a7df4038eef9589779fdd0cf" Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.826635 4946 generic.go:334] "Generic (PLEG): container finished" podID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" containerID="6dd2e8d46022b0e9908a3b1243d4d720615730d5c0386bf24fb4257abb30ace3" exitCode=0 Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.826733 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m666j" event={"ID":"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5","Type":"ContainerDied","Data":"6dd2e8d46022b0e9908a3b1243d4d720615730d5c0386bf24fb4257abb30ace3"} Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.829982 4946 generic.go:334] "Generic (PLEG): container finished" podID="8c9f7504-90bf-4e33-be97-43f5d81896ae" containerID="f13e1ed4d91b07976e087c3006e87b65ea0cb094f53b49799c938467c66bca8f" exitCode=0 Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.830030 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8mgfv" event={"ID":"8c9f7504-90bf-4e33-be97-43f5d81896ae","Type":"ContainerDied","Data":"f13e1ed4d91b07976e087c3006e87b65ea0cb094f53b49799c938467c66bca8f"} Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.830056 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8mgfv" event={"ID":"8c9f7504-90bf-4e33-be97-43f5d81896ae","Type":"ContainerDied","Data":"21adf73d36c612f99c8e2fb06eac64f367075b93414294c87475cf632e214281"} Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.830100 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21adf73d36c612f99c8e2fb06eac64f367075b93414294c87475cf632e214281" Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.898381 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7mzks" Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.912617 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8mgfv" Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.934591 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kr77j" Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.936043 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-df2mv" Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.947596 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.957877 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p65s4" Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.976995 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m666j" Dec 04 15:07:34 crc kubenswrapper[4946]: I1204 15:07:34.978994 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vrlxj" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.015588 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c9f7504-90bf-4e33-be97-43f5d81896ae-utilities\") pod \"8c9f7504-90bf-4e33-be97-43f5d81896ae\" (UID: \"8c9f7504-90bf-4e33-be97-43f5d81896ae\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.015667 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8gdn\" (UniqueName: \"kubernetes.io/projected/73b58e95-46d5-468b-9890-a4fc3c5a0bde-kube-api-access-m8gdn\") pod \"73b58e95-46d5-468b-9890-a4fc3c5a0bde\" (UID: \"73b58e95-46d5-468b-9890-a4fc3c5a0bde\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.015855 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73b58e95-46d5-468b-9890-a4fc3c5a0bde-utilities\") pod \"73b58e95-46d5-468b-9890-a4fc3c5a0bde\" (UID: \"73b58e95-46d5-468b-9890-a4fc3c5a0bde\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.015897 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5jk2\" (UniqueName: \"kubernetes.io/projected/8c9f7504-90bf-4e33-be97-43f5d81896ae-kube-api-access-t5jk2\") pod \"8c9f7504-90bf-4e33-be97-43f5d81896ae\" (UID: \"8c9f7504-90bf-4e33-be97-43f5d81896ae\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.015932 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73b58e95-46d5-468b-9890-a4fc3c5a0bde-catalog-content\") pod \"73b58e95-46d5-468b-9890-a4fc3c5a0bde\" (UID: \"73b58e95-46d5-468b-9890-a4fc3c5a0bde\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.015956 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c9f7504-90bf-4e33-be97-43f5d81896ae-catalog-content\") pod \"8c9f7504-90bf-4e33-be97-43f5d81896ae\" (UID: \"8c9f7504-90bf-4e33-be97-43f5d81896ae\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.016642 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c9f7504-90bf-4e33-be97-43f5d81896ae-utilities" (OuterVolumeSpecName: "utilities") pod "8c9f7504-90bf-4e33-be97-43f5d81896ae" (UID: "8c9f7504-90bf-4e33-be97-43f5d81896ae"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.019462 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73b58e95-46d5-468b-9890-a4fc3c5a0bde-utilities" (OuterVolumeSpecName: "utilities") pod "73b58e95-46d5-468b-9890-a4fc3c5a0bde" (UID: "73b58e95-46d5-468b-9890-a4fc3c5a0bde"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.027225 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c9f7504-90bf-4e33-be97-43f5d81896ae-kube-api-access-t5jk2" (OuterVolumeSpecName: "kube-api-access-t5jk2") pod "8c9f7504-90bf-4e33-be97-43f5d81896ae" (UID: "8c9f7504-90bf-4e33-be97-43f5d81896ae"). InnerVolumeSpecName "kube-api-access-t5jk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.035730 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73b58e95-46d5-468b-9890-a4fc3c5a0bde-kube-api-access-m8gdn" (OuterVolumeSpecName: "kube-api-access-m8gdn") pod "73b58e95-46d5-468b-9890-a4fc3c5a0bde" (UID: "73b58e95-46d5-468b-9890-a4fc3c5a0bde"). InnerVolumeSpecName "kube-api-access-m8gdn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.077518 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73b58e95-46d5-468b-9890-a4fc3c5a0bde-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "73b58e95-46d5-468b-9890-a4fc3c5a0bde" (UID: "73b58e95-46d5-468b-9890-a4fc3c5a0bde"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.080981 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c9f7504-90bf-4e33-be97-43f5d81896ae-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8c9f7504-90bf-4e33-be97-43f5d81896ae" (UID: "8c9f7504-90bf-4e33-be97-43f5d81896ae"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.117032 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46l8s\" (UniqueName: \"kubernetes.io/projected/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5-kube-api-access-46l8s\") pod \"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5\" (UID: \"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.117125 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74c8940c-1ed3-4aaa-94aa-0623f25f008e-catalog-content\") pod \"74c8940c-1ed3-4aaa-94aa-0623f25f008e\" (UID: \"74c8940c-1ed3-4aaa-94aa-0623f25f008e\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.117167 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fec9d9bd-a20b-4625-9070-19949999c206-utilities\") pod \"fec9d9bd-a20b-4625-9070-19949999c206\" (UID: \"fec9d9bd-a20b-4625-9070-19949999c206\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.117198 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c9bd510-5d62-4814-bd88-62c5a3051f9d-utilities\") pod \"9c9bd510-5d62-4814-bd88-62c5a3051f9d\" (UID: \"9c9bd510-5d62-4814-bd88-62c5a3051f9d\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.117225 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b12867e-de02-4b45-ac09-5140aab7451e-marketplace-trusted-ca\") pod \"0b12867e-de02-4b45-ac09-5140aab7451e\" (UID: \"0b12867e-de02-4b45-ac09-5140aab7451e\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.118013 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rcdkj\" (UniqueName: \"kubernetes.io/projected/fec9d9bd-a20b-4625-9070-19949999c206-kube-api-access-rcdkj\") pod \"fec9d9bd-a20b-4625-9070-19949999c206\" (UID: \"fec9d9bd-a20b-4625-9070-19949999c206\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.117950 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c9bd510-5d62-4814-bd88-62c5a3051f9d-utilities" (OuterVolumeSpecName: "utilities") pod "9c9bd510-5d62-4814-bd88-62c5a3051f9d" (UID: "9c9bd510-5d62-4814-bd88-62c5a3051f9d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.118138 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fec9d9bd-a20b-4625-9070-19949999c206-utilities" (OuterVolumeSpecName: "utilities") pod "fec9d9bd-a20b-4625-9070-19949999c206" (UID: "fec9d9bd-a20b-4625-9070-19949999c206"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.118443 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fec9d9bd-a20b-4625-9070-19949999c206-catalog-content\") pod \"fec9d9bd-a20b-4625-9070-19949999c206\" (UID: \"fec9d9bd-a20b-4625-9070-19949999c206\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.118470 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c9bd510-5d62-4814-bd88-62c5a3051f9d-catalog-content\") pod \"9c9bd510-5d62-4814-bd88-62c5a3051f9d\" (UID: \"9c9bd510-5d62-4814-bd88-62c5a3051f9d\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.118946 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b12867e-de02-4b45-ac09-5140aab7451e-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "0b12867e-de02-4b45-ac09-5140aab7451e" (UID: "0b12867e-de02-4b45-ac09-5140aab7451e"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.121798 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5-kube-api-access-46l8s" (OuterVolumeSpecName: "kube-api-access-46l8s") pod "c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" (UID: "c847e258-e0f2-4129-bcf4-6fc12cd4dfe5"). InnerVolumeSpecName "kube-api-access-46l8s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.123388 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fec9d9bd-a20b-4625-9070-19949999c206-kube-api-access-rcdkj" (OuterVolumeSpecName: "kube-api-access-rcdkj") pod "fec9d9bd-a20b-4625-9070-19949999c206" (UID: "fec9d9bd-a20b-4625-9070-19949999c206"). InnerVolumeSpecName "kube-api-access-rcdkj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.133786 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a98b449-5e32-4f53-8829-fc2d01b603b8-utilities\") pod \"7a98b449-5e32-4f53-8829-fc2d01b603b8\" (UID: \"7a98b449-5e32-4f53-8829-fc2d01b603b8\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.133934 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a98b449-5e32-4f53-8829-fc2d01b603b8-catalog-content\") pod \"7a98b449-5e32-4f53-8829-fc2d01b603b8\" (UID: \"7a98b449-5e32-4f53-8829-fc2d01b603b8\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.134716 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a98b449-5e32-4f53-8829-fc2d01b603b8-utilities" (OuterVolumeSpecName: "utilities") pod "7a98b449-5e32-4f53-8829-fc2d01b603b8" (UID: "7a98b449-5e32-4f53-8829-fc2d01b603b8"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.138386 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c9bd510-5d62-4814-bd88-62c5a3051f9d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9c9bd510-5d62-4814-bd88-62c5a3051f9d" (UID: "9c9bd510-5d62-4814-bd88-62c5a3051f9d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.142106 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkx6z\" (UniqueName: \"kubernetes.io/projected/9c9bd510-5d62-4814-bd88-62c5a3051f9d-kube-api-access-rkx6z\") pod \"9c9bd510-5d62-4814-bd88-62c5a3051f9d\" (UID: \"9c9bd510-5d62-4814-bd88-62c5a3051f9d\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.142179 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vxsgj\" (UniqueName: \"kubernetes.io/projected/0b12867e-de02-4b45-ac09-5140aab7451e-kube-api-access-vxsgj\") pod \"0b12867e-de02-4b45-ac09-5140aab7451e\" (UID: \"0b12867e-de02-4b45-ac09-5140aab7451e\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.142250 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5-utilities\") pod \"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5\" (UID: \"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.142295 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0b12867e-de02-4b45-ac09-5140aab7451e-marketplace-operator-metrics\") pod \"0b12867e-de02-4b45-ac09-5140aab7451e\" (UID: \"0b12867e-de02-4b45-ac09-5140aab7451e\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.142365 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5-catalog-content\") pod \"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5\" (UID: \"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.142404 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rd86j\" (UniqueName: \"kubernetes.io/projected/74c8940c-1ed3-4aaa-94aa-0623f25f008e-kube-api-access-rd86j\") pod \"74c8940c-1ed3-4aaa-94aa-0623f25f008e\" (UID: \"74c8940c-1ed3-4aaa-94aa-0623f25f008e\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.142456 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxtzx\" (UniqueName: \"kubernetes.io/projected/7a98b449-5e32-4f53-8829-fc2d01b603b8-kube-api-access-cxtzx\") pod \"7a98b449-5e32-4f53-8829-fc2d01b603b8\" (UID: \"7a98b449-5e32-4f53-8829-fc2d01b603b8\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.142493 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74c8940c-1ed3-4aaa-94aa-0623f25f008e-utilities\") pod \"74c8940c-1ed3-4aaa-94aa-0623f25f008e\" (UID: \"74c8940c-1ed3-4aaa-94aa-0623f25f008e\") " Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.143528 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5-utilities" (OuterVolumeSpecName: "utilities") pod "c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" (UID: "c847e258-e0f2-4129-bcf4-6fc12cd4dfe5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.144739 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8gdn\" (UniqueName: \"kubernetes.io/projected/73b58e95-46d5-468b-9890-a4fc3c5a0bde-kube-api-access-m8gdn\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.144826 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74c8940c-1ed3-4aaa-94aa-0623f25f008e-utilities" (OuterVolumeSpecName: "utilities") pod "74c8940c-1ed3-4aaa-94aa-0623f25f008e" (UID: "74c8940c-1ed3-4aaa-94aa-0623f25f008e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.144858 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fec9d9bd-a20b-4625-9070-19949999c206-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.144938 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c9bd510-5d62-4814-bd88-62c5a3051f9d-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.144957 4946 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b12867e-de02-4b45-ac09-5140aab7451e-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.144970 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rcdkj\" (UniqueName: \"kubernetes.io/projected/fec9d9bd-a20b-4625-9070-19949999c206-kube-api-access-rcdkj\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.144982 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c9bd510-5d62-4814-bd88-62c5a3051f9d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.144993 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a98b449-5e32-4f53-8829-fc2d01b603b8-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.145006 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.145017 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73b58e95-46d5-468b-9890-a4fc3c5a0bde-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.145028 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5jk2\" (UniqueName: \"kubernetes.io/projected/8c9f7504-90bf-4e33-be97-43f5d81896ae-kube-api-access-t5jk2\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.145038 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/73b58e95-46d5-468b-9890-a4fc3c5a0bde-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.145048 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c9f7504-90bf-4e33-be97-43f5d81896ae-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.145058 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c9f7504-90bf-4e33-be97-43f5d81896ae-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.145069 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46l8s\" (UniqueName: \"kubernetes.io/projected/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5-kube-api-access-46l8s\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.145565 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c9bd510-5d62-4814-bd88-62c5a3051f9d-kube-api-access-rkx6z" (OuterVolumeSpecName: "kube-api-access-rkx6z") pod "9c9bd510-5d62-4814-bd88-62c5a3051f9d" (UID: "9c9bd510-5d62-4814-bd88-62c5a3051f9d"). InnerVolumeSpecName "kube-api-access-rkx6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.146597 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b12867e-de02-4b45-ac09-5140aab7451e-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "0b12867e-de02-4b45-ac09-5140aab7451e" (UID: "0b12867e-de02-4b45-ac09-5140aab7451e"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.147464 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b12867e-de02-4b45-ac09-5140aab7451e-kube-api-access-vxsgj" (OuterVolumeSpecName: "kube-api-access-vxsgj") pod "0b12867e-de02-4b45-ac09-5140aab7451e" (UID: "0b12867e-de02-4b45-ac09-5140aab7451e"). InnerVolumeSpecName "kube-api-access-vxsgj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.147891 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74c8940c-1ed3-4aaa-94aa-0623f25f008e-kube-api-access-rd86j" (OuterVolumeSpecName: "kube-api-access-rd86j") pod "74c8940c-1ed3-4aaa-94aa-0623f25f008e" (UID: "74c8940c-1ed3-4aaa-94aa-0623f25f008e"). InnerVolumeSpecName "kube-api-access-rd86j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.148349 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a98b449-5e32-4f53-8829-fc2d01b603b8-kube-api-access-cxtzx" (OuterVolumeSpecName: "kube-api-access-cxtzx") pod "7a98b449-5e32-4f53-8829-fc2d01b603b8" (UID: "7a98b449-5e32-4f53-8829-fc2d01b603b8"). InnerVolumeSpecName "kube-api-access-cxtzx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.158533 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a98b449-5e32-4f53-8829-fc2d01b603b8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7a98b449-5e32-4f53-8829-fc2d01b603b8" (UID: "7a98b449-5e32-4f53-8829-fc2d01b603b8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.188286 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fec9d9bd-a20b-4625-9070-19949999c206-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fec9d9bd-a20b-4625-9070-19949999c206" (UID: "fec9d9bd-a20b-4625-9070-19949999c206"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.244742 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74c8940c-1ed3-4aaa-94aa-0623f25f008e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "74c8940c-1ed3-4aaa-94aa-0623f25f008e" (UID: "74c8940c-1ed3-4aaa-94aa-0623f25f008e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.246049 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rd86j\" (UniqueName: \"kubernetes.io/projected/74c8940c-1ed3-4aaa-94aa-0623f25f008e-kube-api-access-rd86j\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.246079 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxtzx\" (UniqueName: \"kubernetes.io/projected/7a98b449-5e32-4f53-8829-fc2d01b603b8-kube-api-access-cxtzx\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.246095 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74c8940c-1ed3-4aaa-94aa-0623f25f008e-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.246110 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74c8940c-1ed3-4aaa-94aa-0623f25f008e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.246609 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fec9d9bd-a20b-4625-9070-19949999c206-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.246622 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a98b449-5e32-4f53-8829-fc2d01b603b8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.246632 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkx6z\" (UniqueName: \"kubernetes.io/projected/9c9bd510-5d62-4814-bd88-62c5a3051f9d-kube-api-access-rkx6z\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.246642 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vxsgj\" (UniqueName: \"kubernetes.io/projected/0b12867e-de02-4b45-ac09-5140aab7451e-kube-api-access-vxsgj\") 
on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.246686 4946 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0b12867e-de02-4b45-ac09-5140aab7451e-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.256079 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" (UID: "c847e258-e0f2-4129-bcf4-6fc12cd4dfe5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.348776 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.837979 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m666j" event={"ID":"c847e258-e0f2-4129-bcf4-6fc12cd4dfe5","Type":"ContainerDied","Data":"7a18209d2bb4913901f40399553d8cccaaf71349542ad8f9df49a38ea45c6ba7"} Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.838280 4946 scope.go:117] "RemoveContainer" containerID="6dd2e8d46022b0e9908a3b1243d4d720615730d5c0386bf24fb4257abb30ace3" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.838026 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m666j" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.841659 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p65s4" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.841693 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p65s4" event={"ID":"9c9bd510-5d62-4814-bd88-62c5a3051f9d","Type":"ContainerDied","Data":"041d8374272d456cc7d7f2386ecf4329645af6f3136e5ce70c8802290702f418"} Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.845343 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8mgfv" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.846297 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-df2mv" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.846334 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrlxj" event={"ID":"7a98b449-5e32-4f53-8829-fc2d01b603b8","Type":"ContainerDied","Data":"bfa77449c22519cdb969dcb3682a9fed5102932a72a8d1d8e7a3a0f34fb91054"} Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.846460 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7mzks" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.846475 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kr77j" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.846514 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-z4t8l" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.847177 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vrlxj" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.859160 4946 scope.go:117] "RemoveContainer" containerID="4d0b141bd14f5252f1a968e4316ba4d0ac711635aef7b5e8b9a083eb2106c953" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.880666 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m666j"] Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.889102 4946 scope.go:117] "RemoveContainer" containerID="5efc02c5dfaa11891ddbe4d8b5bb46eae0ef6ebe11b23c8a0d37947e87a7b06a" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.889156 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m666j"] Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.897150 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p65s4"] Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.902208 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-p65s4"] Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.918448 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8mgfv"] Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.922685 4946 scope.go:117] "RemoveContainer" containerID="417a02e48c8d926d32b45be48cbe68d8ee5db50498770d81bfe1c02efa84c7e7" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.931216 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8mgfv"] Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.936137 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7mzks"] Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.946749 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7mzks"] Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.952476 4946 scope.go:117] "RemoveContainer" containerID="ec80fda59a454836a1fafb8e0d51b85e9f921e4cff448bab6ac0010797bcf9c8" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.953740 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-z4t8l"] Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.972786 4946 scope.go:117] "RemoveContainer" containerID="77f6ade1abff12cea871879f6a6a5b4381b231c95a8d3e57e4340b64d7b13ff8" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.983143 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-z4t8l"] Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.988368 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-df2mv"] Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.991185 4946 scope.go:117] "RemoveContainer" containerID="04c5e1e86ce28602ac8333d7f6be01a97007faa7102cee140fae2de6453a9b26" Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.993029 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-df2mv"] Dec 04 15:07:35 crc kubenswrapper[4946]: I1204 15:07:35.996444 4946 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/redhat-operators-kr77j"] Dec 04 15:07:36 crc kubenswrapper[4946]: I1204 15:07:35.999835 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kr77j"] Dec 04 15:07:36 crc kubenswrapper[4946]: I1204 15:07:36.003072 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrlxj"] Dec 04 15:07:36 crc kubenswrapper[4946]: I1204 15:07:36.006044 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrlxj"] Dec 04 15:07:36 crc kubenswrapper[4946]: I1204 15:07:36.006170 4946 scope.go:117] "RemoveContainer" containerID="88721f75faa2fb1fd1966132e0bf9a0011a27f8a16a2224ec92d366d9ae7231b" Dec 04 15:07:36 crc kubenswrapper[4946]: I1204 15:07:36.018240 4946 scope.go:117] "RemoveContainer" containerID="cc55a7e1d0f40151d69d4ff686aa0f4be52cba827fe4064ec2dfb7ddf77119cf" Dec 04 15:07:37 crc kubenswrapper[4946]: I1204 15:07:37.463428 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b12867e-de02-4b45-ac09-5140aab7451e" path="/var/lib/kubelet/pods/0b12867e-de02-4b45-ac09-5140aab7451e/volumes" Dec 04 15:07:37 crc kubenswrapper[4946]: I1204 15:07:37.464823 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" path="/var/lib/kubelet/pods/73b58e95-46d5-468b-9890-a4fc3c5a0bde/volumes" Dec 04 15:07:37 crc kubenswrapper[4946]: I1204 15:07:37.465892 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74c8940c-1ed3-4aaa-94aa-0623f25f008e" path="/var/lib/kubelet/pods/74c8940c-1ed3-4aaa-94aa-0623f25f008e/volumes" Dec 04 15:07:37 crc kubenswrapper[4946]: I1204 15:07:37.467420 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a98b449-5e32-4f53-8829-fc2d01b603b8" path="/var/lib/kubelet/pods/7a98b449-5e32-4f53-8829-fc2d01b603b8/volumes" Dec 04 15:07:37 crc kubenswrapper[4946]: I1204 15:07:37.468551 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c9f7504-90bf-4e33-be97-43f5d81896ae" path="/var/lib/kubelet/pods/8c9f7504-90bf-4e33-be97-43f5d81896ae/volumes" Dec 04 15:07:37 crc kubenswrapper[4946]: I1204 15:07:37.470170 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c9bd510-5d62-4814-bd88-62c5a3051f9d" path="/var/lib/kubelet/pods/9c9bd510-5d62-4814-bd88-62c5a3051f9d/volumes" Dec 04 15:07:37 crc kubenswrapper[4946]: I1204 15:07:37.471029 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" path="/var/lib/kubelet/pods/c847e258-e0f2-4129-bcf4-6fc12cd4dfe5/volumes" Dec 04 15:07:37 crc kubenswrapper[4946]: I1204 15:07:37.472017 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fec9d9bd-a20b-4625-9070-19949999c206" path="/var/lib/kubelet/pods/fec9d9bd-a20b-4625-9070-19949999c206/volumes" Dec 04 15:07:38 crc kubenswrapper[4946]: I1204 15:07:38.870984 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 04 15:07:38 crc kubenswrapper[4946]: I1204 15:07:38.871407 4946 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="b6e0b8df59a29e319a03bfb1f0daf5a44cd07c8a86aa66d04139f437ae05963f" exitCode=137 Dec 04 15:07:38 crc kubenswrapper[4946]: I1204 15:07:38.927456 4946 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 04 15:07:38 crc kubenswrapper[4946]: I1204 15:07:38.927558 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:07:38 crc kubenswrapper[4946]: I1204 15:07:38.991726 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 04 15:07:38 crc kubenswrapper[4946]: I1204 15:07:38.991847 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 04 15:07:38 crc kubenswrapper[4946]: I1204 15:07:38.991884 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 15:07:38 crc kubenswrapper[4946]: I1204 15:07:38.991968 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 04 15:07:38 crc kubenswrapper[4946]: I1204 15:07:38.991992 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 15:07:38 crc kubenswrapper[4946]: I1204 15:07:38.992067 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 04 15:07:38 crc kubenswrapper[4946]: I1204 15:07:38.992180 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 04 15:07:38 crc kubenswrapper[4946]: I1204 15:07:38.992381 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 15:07:38 crc kubenswrapper[4946]: I1204 15:07:38.992476 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 15:07:38 crc kubenswrapper[4946]: I1204 15:07:38.992761 4946 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:38 crc kubenswrapper[4946]: I1204 15:07:38.992808 4946 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:38 crc kubenswrapper[4946]: I1204 15:07:38.992838 4946 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:38 crc kubenswrapper[4946]: I1204 15:07:38.992865 4946 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:38 crc kubenswrapper[4946]: I1204 15:07:38.999620 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 15:07:39 crc kubenswrapper[4946]: I1204 15:07:39.094610 4946 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 04 15:07:39 crc kubenswrapper[4946]: I1204 15:07:39.464080 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 04 15:07:39 crc kubenswrapper[4946]: I1204 15:07:39.464980 4946 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 04 15:07:39 crc kubenswrapper[4946]: I1204 15:07:39.475308 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 04 15:07:39 crc kubenswrapper[4946]: I1204 15:07:39.475353 4946 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="5330a34c-0a49-4d48-ac89-6da4f524e3d1" Dec 04 15:07:39 crc kubenswrapper[4946]: I1204 15:07:39.479406 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 04 15:07:39 crc kubenswrapper[4946]: I1204 15:07:39.479451 4946 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="5330a34c-0a49-4d48-ac89-6da4f524e3d1" Dec 04 15:07:39 crc kubenswrapper[4946]: I1204 15:07:39.879083 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 04 15:07:39 crc kubenswrapper[4946]: I1204 15:07:39.879460 4946 scope.go:117] "RemoveContainer" containerID="b6e0b8df59a29e319a03bfb1f0daf5a44cd07c8a86aa66d04139f437ae05963f" Dec 04 15:07:39 crc kubenswrapper[4946]: I1204 15:07:39.879560 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.515758 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2gvfq"] Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.516945 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c9f7504-90bf-4e33-be97-43f5d81896ae" containerName="extract-content" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.516965 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c9f7504-90bf-4e33-be97-43f5d81896ae" containerName="extract-content" Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.516981 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.516989 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517003 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" containerName="registry-server" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517012 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" containerName="registry-server" Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517023 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a98b449-5e32-4f53-8829-fc2d01b603b8" containerName="extract-utilities" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517031 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a98b449-5e32-4f53-8829-fc2d01b603b8" containerName="extract-utilities" Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517041 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c9bd510-5d62-4814-bd88-62c5a3051f9d" containerName="extract-utilities" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517048 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c9bd510-5d62-4814-bd88-62c5a3051f9d" containerName="extract-utilities" Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517058 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74c8940c-1ed3-4aaa-94aa-0623f25f008e" containerName="extract-utilities" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517065 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="74c8940c-1ed3-4aaa-94aa-0623f25f008e" containerName="extract-utilities" Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517075 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a98b449-5e32-4f53-8829-fc2d01b603b8" containerName="registry-server" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517083 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a98b449-5e32-4f53-8829-fc2d01b603b8" containerName="registry-server" Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517098 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c9f7504-90bf-4e33-be97-43f5d81896ae" containerName="registry-server" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517106 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c9f7504-90bf-4e33-be97-43f5d81896ae" containerName="registry-server" Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517161 4946 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" containerName="registry-server" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517170 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" containerName="registry-server" Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517184 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c9f7504-90bf-4e33-be97-43f5d81896ae" containerName="extract-utilities" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517192 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c9f7504-90bf-4e33-be97-43f5d81896ae" containerName="extract-utilities" Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517200 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c9bd510-5d62-4814-bd88-62c5a3051f9d" containerName="extract-content" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517210 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c9bd510-5d62-4814-bd88-62c5a3051f9d" containerName="extract-content" Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517220 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b12867e-de02-4b45-ac09-5140aab7451e" containerName="marketplace-operator" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517415 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b12867e-de02-4b45-ac09-5140aab7451e" containerName="marketplace-operator" Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517425 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" containerName="extract-content" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517433 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" containerName="extract-content" Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517445 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fec9d9bd-a20b-4625-9070-19949999c206" containerName="extract-utilities" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517453 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="fec9d9bd-a20b-4625-9070-19949999c206" containerName="extract-utilities" Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517463 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a98b449-5e32-4f53-8829-fc2d01b603b8" containerName="extract-content" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517470 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a98b449-5e32-4f53-8829-fc2d01b603b8" containerName="extract-content" Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517478 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c9bd510-5d62-4814-bd88-62c5a3051f9d" containerName="registry-server" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517489 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c9bd510-5d62-4814-bd88-62c5a3051f9d" containerName="registry-server" Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517503 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ba0b499-56a8-4e62-93cc-c2f2f1cad117" containerName="installer" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517512 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ba0b499-56a8-4e62-93cc-c2f2f1cad117" containerName="installer" Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517522 
4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fec9d9bd-a20b-4625-9070-19949999c206" containerName="extract-content"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517530 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="fec9d9bd-a20b-4625-9070-19949999c206" containerName="extract-content"
Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517541 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" containerName="extract-content"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517549 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" containerName="extract-content"
Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517558 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74c8940c-1ed3-4aaa-94aa-0623f25f008e" containerName="registry-server"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517566 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="74c8940c-1ed3-4aaa-94aa-0623f25f008e" containerName="registry-server"
Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517575 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" containerName="extract-utilities"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517584 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" containerName="extract-utilities"
Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517593 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fec9d9bd-a20b-4625-9070-19949999c206" containerName="registry-server"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517602 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="fec9d9bd-a20b-4625-9070-19949999c206" containerName="registry-server"
Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517617 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74c8940c-1ed3-4aaa-94aa-0623f25f008e" containerName="extract-content"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517624 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="74c8940c-1ed3-4aaa-94aa-0623f25f008e" containerName="extract-content"
Dec 04 15:07:48 crc kubenswrapper[4946]: E1204 15:07:48.517632 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" containerName="extract-utilities"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517641 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" containerName="extract-utilities"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517763 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="74c8940c-1ed3-4aaa-94aa-0623f25f008e" containerName="registry-server"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517774 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517786 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="fec9d9bd-a20b-4625-9070-19949999c206" containerName="registry-server"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517798 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b12867e-de02-4b45-ac09-5140aab7451e" containerName="marketplace-operator"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517807 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="c847e258-e0f2-4129-bcf4-6fc12cd4dfe5" containerName="registry-server"
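These cpu_manager/memory_manager RemoveStaleState entries run as the new marketplace-operator pod is admitted: both managers walk their checkpointed per-container assignments and drop every (podUID, containerName) pair that no longer maps to an active pod. A simplified sketch of that pruning pattern (illustrative only, not the kubelet's implementation):

package main

import "fmt"

type key struct{ podUID, container string }

// removeStale deletes assignments whose pod UID is no longer in the
// active set and returns the removed keys, mirroring the log's
// one-line-per-container cleanup.
func removeStale(assignments map[key][]int, active map[string]bool) []key {
	var removed []key
	for k := range assignments {
		if !active[k.podUID] {
			removed = append(removed, k)
			delete(assignments, k)
		}
	}
	return removed
}

func main() {
	// Hypothetical CPU assignments; the UIDs are taken from the log above.
	assignments := map[key][]int{
		{"8c9f7504-90bf-4e33-be97-43f5d81896ae", "registry-server"}:      {2, 3},
		{"0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8", "marketplace-operator"}: {1},
	}
	active := map[string]bool{"0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8": true}
	for _, k := range removeStale(assignments, active) {
		fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", k.podUID, k.container)
	}
}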
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517819 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a98b449-5e32-4f53-8829-fc2d01b603b8" containerName="registry-server"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517827 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ba0b499-56a8-4e62-93cc-c2f2f1cad117" containerName="installer"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517838 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c9f7504-90bf-4e33-be97-43f5d81896ae" containerName="registry-server"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517848 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c9bd510-5d62-4814-bd88-62c5a3051f9d" containerName="registry-server"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.517856 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="73b58e95-46d5-468b-9890-a4fc3c5a0bde" containerName="registry-server"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.518719 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2gvfq"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.522023 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.522089 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.522306 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.522919 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.532684 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.533733 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2gvfq"]
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.632065 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2gvfq\" (UID: \"0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8\") " pod="openshift-marketplace/marketplace-operator-79b997595-2gvfq"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.632151 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6w2k\" (UniqueName: \"kubernetes.io/projected/0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8-kube-api-access-p6w2k\") pod \"marketplace-operator-79b997595-2gvfq\" (UID: \"0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8\") " pod="openshift-marketplace/marketplace-operator-79b997595-2gvfq"
Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.632187 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume
started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2gvfq\" (UID: \"0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8\") " pod="openshift-marketplace/marketplace-operator-79b997595-2gvfq" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.734026 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2gvfq\" (UID: \"0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8\") " pod="openshift-marketplace/marketplace-operator-79b997595-2gvfq" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.734103 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6w2k\" (UniqueName: \"kubernetes.io/projected/0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8-kube-api-access-p6w2k\") pod \"marketplace-operator-79b997595-2gvfq\" (UID: \"0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8\") " pod="openshift-marketplace/marketplace-operator-79b997595-2gvfq" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.734158 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2gvfq\" (UID: \"0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8\") " pod="openshift-marketplace/marketplace-operator-79b997595-2gvfq" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.735235 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2gvfq\" (UID: \"0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8\") " pod="openshift-marketplace/marketplace-operator-79b997595-2gvfq" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.751928 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2gvfq\" (UID: \"0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8\") " pod="openshift-marketplace/marketplace-operator-79b997595-2gvfq" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.752367 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6w2k\" (UniqueName: \"kubernetes.io/projected/0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8-kube-api-access-p6w2k\") pod \"marketplace-operator-79b997595-2gvfq\" (UID: \"0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8\") " pod="openshift-marketplace/marketplace-operator-79b997595-2gvfq" Dec 04 15:07:48 crc kubenswrapper[4946]: I1204 15:07:48.838466 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2gvfq" Dec 04 15:07:49 crc kubenswrapper[4946]: I1204 15:07:49.147078 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2gvfq"] Dec 04 15:07:49 crc kubenswrapper[4946]: I1204 15:07:49.956776 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2gvfq" event={"ID":"0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8","Type":"ContainerStarted","Data":"4a379d52f688769488aba4bf2fccaaa7fe4f89e9e56f04dc7f60ee7269c0667a"} Dec 04 15:07:49 crc kubenswrapper[4946]: I1204 15:07:49.957139 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2gvfq" event={"ID":"0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8","Type":"ContainerStarted","Data":"5706820bc43c5233733a46130550cc7f173346eea2a979c9cef62f59c352f80b"} Dec 04 15:07:49 crc kubenswrapper[4946]: I1204 15:07:49.957160 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-2gvfq" Dec 04 15:07:49 crc kubenswrapper[4946]: I1204 15:07:49.962239 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-2gvfq" Dec 04 15:07:49 crc kubenswrapper[4946]: I1204 15:07:49.973514 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-2gvfq" podStartSLOduration=1.973489862 podStartE2EDuration="1.973489862s" podCreationTimestamp="2025-12-04 15:07:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:07:49.973156873 +0000 UTC m=+320.859200514" watchObservedRunningTime="2025-12-04 15:07:49.973489862 +0000 UTC m=+320.859533503" Dec 04 15:07:53 crc kubenswrapper[4946]: I1204 15:07:53.979951 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 04 15:07:53 crc kubenswrapper[4946]: I1204 15:07:53.982893 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 04 15:07:53 crc kubenswrapper[4946]: I1204 15:07:53.982968 4946 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="dea2988fe3b8f6ecb0043335a55301daecdb9c94aa4d61a902f28bbfb7f12aed" exitCode=137 Dec 04 15:07:53 crc kubenswrapper[4946]: I1204 15:07:53.983016 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"dea2988fe3b8f6ecb0043335a55301daecdb9c94aa4d61a902f28bbfb7f12aed"} Dec 04 15:07:53 crc kubenswrapper[4946]: I1204 15:07:53.983074 4946 scope.go:117] "RemoveContainer" containerID="5bfb7a6a63ec6bc4c0c96ce8e9f2a29ca19f093c1f93ee274f55608f68c4d97f" Dec 04 15:07:54 crc kubenswrapper[4946]: I1204 15:07:54.990514 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 04 15:07:54 crc kubenswrapper[4946]: I1204 15:07:54.991934 4946 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c15be9f3303e9fedd879b337286824515e69d56818a34fa0a71ff25d6c4c8d1e"} Dec 04 15:07:55 crc kubenswrapper[4946]: I1204 15:07:55.924371 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:08:03 crc kubenswrapper[4946]: I1204 15:08:03.576084 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:08:03 crc kubenswrapper[4946]: I1204 15:08:03.586275 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:08:04 crc kubenswrapper[4946]: I1204 15:08:04.053626 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 15:08:22 crc kubenswrapper[4946]: I1204 15:08:22.478349 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:08:22 crc kubenswrapper[4946]: I1204 15:08:22.478858 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:08:23 crc kubenswrapper[4946]: I1204 15:08:23.679793 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-bn6ld"] Dec 04 15:08:23 crc kubenswrapper[4946]: I1204 15:08:23.680161 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" podUID="93885d87-d68e-4445-abd6-ece851137b17" containerName="controller-manager" containerID="cri-o://94005a1343f7908b304644fadf0c4c13fab36e6b6933b63dfb088ae1aa4162cc" gracePeriod=30 Dec 04 15:08:23 crc kubenswrapper[4946]: I1204 15:08:23.684381 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq"] Dec 04 15:08:23 crc kubenswrapper[4946]: I1204 15:08:23.685041 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" podUID="1f998c86-99a4-4416-b810-b40a8fb1775f" containerName="route-controller-manager" containerID="cri-o://7a3add17eb2833c5217722896513f5427b4dfd76abcb2c29f862310da944c245" gracePeriod=30 Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.170203 4946 generic.go:334] "Generic (PLEG): container finished" podID="1f998c86-99a4-4416-b810-b40a8fb1775f" containerID="7a3add17eb2833c5217722896513f5427b4dfd76abcb2c29f862310da944c245" exitCode=0 Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.170327 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" 
event={"ID":"1f998c86-99a4-4416-b810-b40a8fb1775f","Type":"ContainerDied","Data":"7a3add17eb2833c5217722896513f5427b4dfd76abcb2c29f862310da944c245"} Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.172179 4946 generic.go:334] "Generic (PLEG): container finished" podID="93885d87-d68e-4445-abd6-ece851137b17" containerID="94005a1343f7908b304644fadf0c4c13fab36e6b6933b63dfb088ae1aa4162cc" exitCode=0 Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.172213 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" event={"ID":"93885d87-d68e-4445-abd6-ece851137b17","Type":"ContainerDied","Data":"94005a1343f7908b304644fadf0c4c13fab36e6b6933b63dfb088ae1aa4162cc"} Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.590669 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.597579 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.659730 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93885d87-d68e-4445-abd6-ece851137b17-client-ca\") pod \"93885d87-d68e-4445-abd6-ece851137b17\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.659833 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqbr8\" (UniqueName: \"kubernetes.io/projected/93885d87-d68e-4445-abd6-ece851137b17-kube-api-access-cqbr8\") pod \"93885d87-d68e-4445-abd6-ece851137b17\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.659947 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93885d87-d68e-4445-abd6-ece851137b17-config\") pod \"93885d87-d68e-4445-abd6-ece851137b17\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.659989 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f998c86-99a4-4416-b810-b40a8fb1775f-serving-cert\") pod \"1f998c86-99a4-4416-b810-b40a8fb1775f\" (UID: \"1f998c86-99a4-4416-b810-b40a8fb1775f\") " Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.660031 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f998c86-99a4-4416-b810-b40a8fb1775f-config\") pod \"1f998c86-99a4-4416-b810-b40a8fb1775f\" (UID: \"1f998c86-99a4-4416-b810-b40a8fb1775f\") " Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.660133 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93885d87-d68e-4445-abd6-ece851137b17-proxy-ca-bundles\") pod \"93885d87-d68e-4445-abd6-ece851137b17\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.660156 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93885d87-d68e-4445-abd6-ece851137b17-serving-cert\") pod 
\"93885d87-d68e-4445-abd6-ece851137b17\" (UID: \"93885d87-d68e-4445-abd6-ece851137b17\") " Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.660179 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hxpn\" (UniqueName: \"kubernetes.io/projected/1f998c86-99a4-4416-b810-b40a8fb1775f-kube-api-access-2hxpn\") pod \"1f998c86-99a4-4416-b810-b40a8fb1775f\" (UID: \"1f998c86-99a4-4416-b810-b40a8fb1775f\") " Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.660215 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f998c86-99a4-4416-b810-b40a8fb1775f-client-ca\") pod \"1f998c86-99a4-4416-b810-b40a8fb1775f\" (UID: \"1f998c86-99a4-4416-b810-b40a8fb1775f\") " Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.661672 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93885d87-d68e-4445-abd6-ece851137b17-client-ca" (OuterVolumeSpecName: "client-ca") pod "93885d87-d68e-4445-abd6-ece851137b17" (UID: "93885d87-d68e-4445-abd6-ece851137b17"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.661834 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93885d87-d68e-4445-abd6-ece851137b17-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "93885d87-d68e-4445-abd6-ece851137b17" (UID: "93885d87-d68e-4445-abd6-ece851137b17"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.661895 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f998c86-99a4-4416-b810-b40a8fb1775f-client-ca" (OuterVolumeSpecName: "client-ca") pod "1f998c86-99a4-4416-b810-b40a8fb1775f" (UID: "1f998c86-99a4-4416-b810-b40a8fb1775f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.661957 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93885d87-d68e-4445-abd6-ece851137b17-config" (OuterVolumeSpecName: "config") pod "93885d87-d68e-4445-abd6-ece851137b17" (UID: "93885d87-d68e-4445-abd6-ece851137b17"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.662045 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f998c86-99a4-4416-b810-b40a8fb1775f-config" (OuterVolumeSpecName: "config") pod "1f998c86-99a4-4416-b810-b40a8fb1775f" (UID: "1f998c86-99a4-4416-b810-b40a8fb1775f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.667567 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93885d87-d68e-4445-abd6-ece851137b17-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "93885d87-d68e-4445-abd6-ece851137b17" (UID: "93885d87-d68e-4445-abd6-ece851137b17"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.669501 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f998c86-99a4-4416-b810-b40a8fb1775f-kube-api-access-2hxpn" (OuterVolumeSpecName: "kube-api-access-2hxpn") pod "1f998c86-99a4-4416-b810-b40a8fb1775f" (UID: "1f998c86-99a4-4416-b810-b40a8fb1775f"). InnerVolumeSpecName "kube-api-access-2hxpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.670475 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93885d87-d68e-4445-abd6-ece851137b17-kube-api-access-cqbr8" (OuterVolumeSpecName: "kube-api-access-cqbr8") pod "93885d87-d68e-4445-abd6-ece851137b17" (UID: "93885d87-d68e-4445-abd6-ece851137b17"). InnerVolumeSpecName "kube-api-access-cqbr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.670618 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f998c86-99a4-4416-b810-b40a8fb1775f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1f998c86-99a4-4416-b810-b40a8fb1775f" (UID: "1f998c86-99a4-4416-b810-b40a8fb1775f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.762066 4946 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93885d87-d68e-4445-abd6-ece851137b17-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.763295 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93885d87-d68e-4445-abd6-ece851137b17-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.763328 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hxpn\" (UniqueName: \"kubernetes.io/projected/1f998c86-99a4-4416-b810-b40a8fb1775f-kube-api-access-2hxpn\") on node \"crc\" DevicePath \"\"" Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.763345 4946 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f998c86-99a4-4416-b810-b40a8fb1775f-client-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.763362 4946 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93885d87-d68e-4445-abd6-ece851137b17-client-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.763381 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqbr8\" (UniqueName: \"kubernetes.io/projected/93885d87-d68e-4445-abd6-ece851137b17-kube-api-access-cqbr8\") on node \"crc\" DevicePath \"\"" Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.763395 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93885d87-d68e-4445-abd6-ece851137b17-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:08:24 crc kubenswrapper[4946]: I1204 15:08:24.763406 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f998c86-99a4-4416-b810-b40a8fb1775f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:08:24 crc 
kubenswrapper[4946]: I1204 15:08:24.763417 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f998c86-99a4-4416-b810-b40a8fb1775f-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.008228 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d"] Dec 04 15:08:25 crc kubenswrapper[4946]: E1204 15:08:25.012150 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93885d87-d68e-4445-abd6-ece851137b17" containerName="controller-manager" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.012178 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="93885d87-d68e-4445-abd6-ece851137b17" containerName="controller-manager" Dec 04 15:08:25 crc kubenswrapper[4946]: E1204 15:08:25.012213 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f998c86-99a4-4416-b810-b40a8fb1775f" containerName="route-controller-manager" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.012225 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f998c86-99a4-4416-b810-b40a8fb1775f" containerName="route-controller-manager" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.012434 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="93885d87-d68e-4445-abd6-ece851137b17" containerName="controller-manager" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.012457 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f998c86-99a4-4416-b810-b40a8fb1775f" containerName="route-controller-manager" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.013070 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.037485 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-84f6b5785b-j6wmh"] Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.039258 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.050333 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d"] Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.054438 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-84f6b5785b-j6wmh"] Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.167879 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-proxy-ca-bundles\") pod \"controller-manager-84f6b5785b-j6wmh\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.167935 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-client-ca\") pod \"controller-manager-84f6b5785b-j6wmh\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.167962 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d76c785-e3b3-4b17-8c34-483ca66bf2cf-config\") pod \"route-controller-manager-86f4b6db58-nrf2d\" (UID: \"5d76c785-e3b3-4b17-8c34-483ca66bf2cf\") " pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.167995 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6brp\" (UniqueName: \"kubernetes.io/projected/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-kube-api-access-l6brp\") pod \"controller-manager-84f6b5785b-j6wmh\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.168021 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-config\") pod \"controller-manager-84f6b5785b-j6wmh\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.168045 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-serving-cert\") pod \"controller-manager-84f6b5785b-j6wmh\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.168060 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d76c785-e3b3-4b17-8c34-483ca66bf2cf-serving-cert\") pod \"route-controller-manager-86f4b6db58-nrf2d\" (UID: \"5d76c785-e3b3-4b17-8c34-483ca66bf2cf\") " 
pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.168077 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5d76c785-e3b3-4b17-8c34-483ca66bf2cf-client-ca\") pod \"route-controller-manager-86f4b6db58-nrf2d\" (UID: \"5d76c785-e3b3-4b17-8c34-483ca66bf2cf\") " pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.168099 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r44hn\" (UniqueName: \"kubernetes.io/projected/5d76c785-e3b3-4b17-8c34-483ca66bf2cf-kube-api-access-r44hn\") pod \"route-controller-manager-86f4b6db58-nrf2d\" (UID: \"5d76c785-e3b3-4b17-8c34-483ca66bf2cf\") " pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.182023 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.182010 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-bn6ld" event={"ID":"93885d87-d68e-4445-abd6-ece851137b17","Type":"ContainerDied","Data":"a518f98c0ead0af32911567b6c0c825568c0eafbb7afab335b355cd481f42cf4"} Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.182169 4946 scope.go:117] "RemoveContainer" containerID="94005a1343f7908b304644fadf0c4c13fab36e6b6933b63dfb088ae1aa4162cc" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.186916 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" event={"ID":"1f998c86-99a4-4416-b810-b40a8fb1775f","Type":"ContainerDied","Data":"b195f00c8871a9f9866e625de6834d6aecb186f263506b3ecf8804f14984a312"} Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.187011 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.207308 4946 scope.go:117] "RemoveContainer" containerID="7a3add17eb2833c5217722896513f5427b4dfd76abcb2c29f862310da944c245" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.217599 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq"] Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.223593 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fqprq"] Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.236578 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-bn6ld"] Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.240435 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-bn6ld"] Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.269886 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-proxy-ca-bundles\") pod \"controller-manager-84f6b5785b-j6wmh\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.269946 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-client-ca\") pod \"controller-manager-84f6b5785b-j6wmh\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.269973 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d76c785-e3b3-4b17-8c34-483ca66bf2cf-config\") pod \"route-controller-manager-86f4b6db58-nrf2d\" (UID: \"5d76c785-e3b3-4b17-8c34-483ca66bf2cf\") " pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.269998 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6brp\" (UniqueName: \"kubernetes.io/projected/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-kube-api-access-l6brp\") pod \"controller-manager-84f6b5785b-j6wmh\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.270021 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-config\") pod \"controller-manager-84f6b5785b-j6wmh\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.270041 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-serving-cert\") pod \"controller-manager-84f6b5785b-j6wmh\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " 
pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.270059 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d76c785-e3b3-4b17-8c34-483ca66bf2cf-serving-cert\") pod \"route-controller-manager-86f4b6db58-nrf2d\" (UID: \"5d76c785-e3b3-4b17-8c34-483ca66bf2cf\") " pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.270077 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5d76c785-e3b3-4b17-8c34-483ca66bf2cf-client-ca\") pod \"route-controller-manager-86f4b6db58-nrf2d\" (UID: \"5d76c785-e3b3-4b17-8c34-483ca66bf2cf\") " pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.270102 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r44hn\" (UniqueName: \"kubernetes.io/projected/5d76c785-e3b3-4b17-8c34-483ca66bf2cf-kube-api-access-r44hn\") pod \"route-controller-manager-86f4b6db58-nrf2d\" (UID: \"5d76c785-e3b3-4b17-8c34-483ca66bf2cf\") " pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.272059 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-proxy-ca-bundles\") pod \"controller-manager-84f6b5785b-j6wmh\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.272203 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d76c785-e3b3-4b17-8c34-483ca66bf2cf-config\") pod \"route-controller-manager-86f4b6db58-nrf2d\" (UID: \"5d76c785-e3b3-4b17-8c34-483ca66bf2cf\") " pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.272275 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-client-ca\") pod \"controller-manager-84f6b5785b-j6wmh\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.272411 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5d76c785-e3b3-4b17-8c34-483ca66bf2cf-client-ca\") pod \"route-controller-manager-86f4b6db58-nrf2d\" (UID: \"5d76c785-e3b3-4b17-8c34-483ca66bf2cf\") " pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.273251 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-config\") pod \"controller-manager-84f6b5785b-j6wmh\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.280818 4946 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-serving-cert\") pod \"controller-manager-84f6b5785b-j6wmh\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.281345 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d76c785-e3b3-4b17-8c34-483ca66bf2cf-serving-cert\") pod \"route-controller-manager-86f4b6db58-nrf2d\" (UID: \"5d76c785-e3b3-4b17-8c34-483ca66bf2cf\") " pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.294143 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r44hn\" (UniqueName: \"kubernetes.io/projected/5d76c785-e3b3-4b17-8c34-483ca66bf2cf-kube-api-access-r44hn\") pod \"route-controller-manager-86f4b6db58-nrf2d\" (UID: \"5d76c785-e3b3-4b17-8c34-483ca66bf2cf\") " pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.299020 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6brp\" (UniqueName: \"kubernetes.io/projected/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-kube-api-access-l6brp\") pod \"controller-manager-84f6b5785b-j6wmh\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.339908 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.355337 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.459807 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f998c86-99a4-4416-b810-b40a8fb1775f" path="/var/lib/kubelet/pods/1f998c86-99a4-4416-b810-b40a8fb1775f/volumes" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.460863 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93885d87-d68e-4445-abd6-ece851137b17" path="/var/lib/kubelet/pods/93885d87-d68e-4445-abd6-ece851137b17/volumes" Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.550516 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d"] Dec 04 15:08:25 crc kubenswrapper[4946]: I1204 15:08:25.614525 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-84f6b5785b-j6wmh"] Dec 04 15:08:25 crc kubenswrapper[4946]: W1204 15:08:25.636342 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99a0d72f_3fdf_45e4_bb50_c1855a37f6f4.slice/crio-61a9ca4500f0a378574563e8e43aa2fd68fc3133c4e3860090133642e2d25d9f WatchSource:0}: Error finding container 61a9ca4500f0a378574563e8e43aa2fd68fc3133c4e3860090133642e2d25d9f: Status 404 returned error can't find the container with id 61a9ca4500f0a378574563e8e43aa2fd68fc3133c4e3860090133642e2d25d9f Dec 04 15:08:26 crc kubenswrapper[4946]: I1204 15:08:26.193459 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" event={"ID":"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4","Type":"ContainerStarted","Data":"511cc2e2e1b6e8db32e4442f4d80187920cb9d4ab7983b479bacf802da99d75b"} Dec 04 15:08:26 crc kubenswrapper[4946]: I1204 15:08:26.193793 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:26 crc kubenswrapper[4946]: I1204 15:08:26.193809 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" event={"ID":"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4","Type":"ContainerStarted","Data":"61a9ca4500f0a378574563e8e43aa2fd68fc3133c4e3860090133642e2d25d9f"} Dec 04 15:08:26 crc kubenswrapper[4946]: I1204 15:08:26.198747 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:08:26 crc kubenswrapper[4946]: I1204 15:08:26.199024 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" event={"ID":"5d76c785-e3b3-4b17-8c34-483ca66bf2cf","Type":"ContainerStarted","Data":"ee08ffcdc436cddc8b3ee1434e095662e881a6bcaa5b9dcb85b4e6d2a82bbcde"} Dec 04 15:08:26 crc kubenswrapper[4946]: I1204 15:08:26.199064 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" event={"ID":"5d76c785-e3b3-4b17-8c34-483ca66bf2cf","Type":"ContainerStarted","Data":"4149951b79c6e3921ee63fb128aa62b3a0c3d639c5fdf1c8d1fa4819fe576644"} Dec 04 15:08:26 crc kubenswrapper[4946]: I1204 15:08:26.199157 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" Dec 04 
15:08:26 crc kubenswrapper[4946]: I1204 15:08:26.203950 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" Dec 04 15:08:26 crc kubenswrapper[4946]: I1204 15:08:26.214409 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" podStartSLOduration=3.214383901 podStartE2EDuration="3.214383901s" podCreationTimestamp="2025-12-04 15:08:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:08:26.210694112 +0000 UTC m=+357.096737763" watchObservedRunningTime="2025-12-04 15:08:26.214383901 +0000 UTC m=+357.100427542" Dec 04 15:08:26 crc kubenswrapper[4946]: I1204 15:08:26.238797 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-86f4b6db58-nrf2d" podStartSLOduration=3.238775016 podStartE2EDuration="3.238775016s" podCreationTimestamp="2025-12-04 15:08:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:08:26.235626154 +0000 UTC m=+357.121669795" watchObservedRunningTime="2025-12-04 15:08:26.238775016 +0000 UTC m=+357.124818657" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.550476 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mgklh"] Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.551532 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mgklh" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.556794 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.566280 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mgklh"] Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.658700 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64dc1b8a-cfaa-435d-a093-fff34239250b-catalog-content\") pod \"redhat-marketplace-mgklh\" (UID: \"64dc1b8a-cfaa-435d-a093-fff34239250b\") " pod="openshift-marketplace/redhat-marketplace-mgklh" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.658816 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64dc1b8a-cfaa-435d-a093-fff34239250b-utilities\") pod \"redhat-marketplace-mgklh\" (UID: \"64dc1b8a-cfaa-435d-a093-fff34239250b\") " pod="openshift-marketplace/redhat-marketplace-mgklh" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.658877 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9whkl\" (UniqueName: \"kubernetes.io/projected/64dc1b8a-cfaa-435d-a093-fff34239250b-kube-api-access-9whkl\") pod \"redhat-marketplace-mgklh\" (UID: \"64dc1b8a-cfaa-435d-a093-fff34239250b\") " pod="openshift-marketplace/redhat-marketplace-mgklh" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.748805 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-l6sf5"] Dec 
04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.749775 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l6sf5" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.753264 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.759919 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64dc1b8a-cfaa-435d-a093-fff34239250b-catalog-content\") pod \"redhat-marketplace-mgklh\" (UID: \"64dc1b8a-cfaa-435d-a093-fff34239250b\") " pod="openshift-marketplace/redhat-marketplace-mgklh" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.759960 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64dc1b8a-cfaa-435d-a093-fff34239250b-utilities\") pod \"redhat-marketplace-mgklh\" (UID: \"64dc1b8a-cfaa-435d-a093-fff34239250b\") " pod="openshift-marketplace/redhat-marketplace-mgklh" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.759995 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9whkl\" (UniqueName: \"kubernetes.io/projected/64dc1b8a-cfaa-435d-a093-fff34239250b-kube-api-access-9whkl\") pod \"redhat-marketplace-mgklh\" (UID: \"64dc1b8a-cfaa-435d-a093-fff34239250b\") " pod="openshift-marketplace/redhat-marketplace-mgklh" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.760660 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64dc1b8a-cfaa-435d-a093-fff34239250b-catalog-content\") pod \"redhat-marketplace-mgklh\" (UID: \"64dc1b8a-cfaa-435d-a093-fff34239250b\") " pod="openshift-marketplace/redhat-marketplace-mgklh" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.761056 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64dc1b8a-cfaa-435d-a093-fff34239250b-utilities\") pod \"redhat-marketplace-mgklh\" (UID: \"64dc1b8a-cfaa-435d-a093-fff34239250b\") " pod="openshift-marketplace/redhat-marketplace-mgklh" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.761831 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l6sf5"] Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.800015 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9whkl\" (UniqueName: \"kubernetes.io/projected/64dc1b8a-cfaa-435d-a093-fff34239250b-kube-api-access-9whkl\") pod \"redhat-marketplace-mgklh\" (UID: \"64dc1b8a-cfaa-435d-a093-fff34239250b\") " pod="openshift-marketplace/redhat-marketplace-mgklh" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.861197 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4l79f\" (UniqueName: \"kubernetes.io/projected/3fcf10a5-8a06-4542-9839-91e2881b5a5e-kube-api-access-4l79f\") pod \"certified-operators-l6sf5\" (UID: \"3fcf10a5-8a06-4542-9839-91e2881b5a5e\") " pod="openshift-marketplace/certified-operators-l6sf5" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.861484 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/3fcf10a5-8a06-4542-9839-91e2881b5a5e-catalog-content\") pod \"certified-operators-l6sf5\" (UID: \"3fcf10a5-8a06-4542-9839-91e2881b5a5e\") " pod="openshift-marketplace/certified-operators-l6sf5" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.861554 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fcf10a5-8a06-4542-9839-91e2881b5a5e-utilities\") pod \"certified-operators-l6sf5\" (UID: \"3fcf10a5-8a06-4542-9839-91e2881b5a5e\") " pod="openshift-marketplace/certified-operators-l6sf5" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.868831 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mgklh" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.964050 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fcf10a5-8a06-4542-9839-91e2881b5a5e-utilities\") pod \"certified-operators-l6sf5\" (UID: \"3fcf10a5-8a06-4542-9839-91e2881b5a5e\") " pod="openshift-marketplace/certified-operators-l6sf5" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.964149 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4l79f\" (UniqueName: \"kubernetes.io/projected/3fcf10a5-8a06-4542-9839-91e2881b5a5e-kube-api-access-4l79f\") pod \"certified-operators-l6sf5\" (UID: \"3fcf10a5-8a06-4542-9839-91e2881b5a5e\") " pod="openshift-marketplace/certified-operators-l6sf5" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.964204 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fcf10a5-8a06-4542-9839-91e2881b5a5e-catalog-content\") pod \"certified-operators-l6sf5\" (UID: \"3fcf10a5-8a06-4542-9839-91e2881b5a5e\") " pod="openshift-marketplace/certified-operators-l6sf5" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.964960 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fcf10a5-8a06-4542-9839-91e2881b5a5e-catalog-content\") pod \"certified-operators-l6sf5\" (UID: \"3fcf10a5-8a06-4542-9839-91e2881b5a5e\") " pod="openshift-marketplace/certified-operators-l6sf5" Dec 04 15:08:31 crc kubenswrapper[4946]: I1204 15:08:31.966448 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fcf10a5-8a06-4542-9839-91e2881b5a5e-utilities\") pod \"certified-operators-l6sf5\" (UID: \"3fcf10a5-8a06-4542-9839-91e2881b5a5e\") " pod="openshift-marketplace/certified-operators-l6sf5" Dec 04 15:08:32 crc kubenswrapper[4946]: I1204 15:08:31.993563 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4l79f\" (UniqueName: \"kubernetes.io/projected/3fcf10a5-8a06-4542-9839-91e2881b5a5e-kube-api-access-4l79f\") pod \"certified-operators-l6sf5\" (UID: \"3fcf10a5-8a06-4542-9839-91e2881b5a5e\") " pod="openshift-marketplace/certified-operators-l6sf5" Dec 04 15:08:32 crc kubenswrapper[4946]: I1204 15:08:32.065441 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l6sf5" Dec 04 15:08:32 crc kubenswrapper[4946]: I1204 15:08:32.278313 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mgklh"] Dec 04 15:08:32 crc kubenswrapper[4946]: I1204 15:08:32.484689 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l6sf5"] Dec 04 15:08:32 crc kubenswrapper[4946]: W1204 15:08:32.508183 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3fcf10a5_8a06_4542_9839_91e2881b5a5e.slice/crio-3e560638d52f9384f04418e922057c7fd7abaae33aadc4528712ea933471999f WatchSource:0}: Error finding container 3e560638d52f9384f04418e922057c7fd7abaae33aadc4528712ea933471999f: Status 404 returned error can't find the container with id 3e560638d52f9384f04418e922057c7fd7abaae33aadc4528712ea933471999f Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.245554 4946 generic.go:334] "Generic (PLEG): container finished" podID="64dc1b8a-cfaa-435d-a093-fff34239250b" containerID="617e239f43a95eac954b05817d88dec1a3a5bc722baff2523a7b0d69445b0e4d" exitCode=0 Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.245639 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mgklh" event={"ID":"64dc1b8a-cfaa-435d-a093-fff34239250b","Type":"ContainerDied","Data":"617e239f43a95eac954b05817d88dec1a3a5bc722baff2523a7b0d69445b0e4d"} Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.246051 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mgklh" event={"ID":"64dc1b8a-cfaa-435d-a093-fff34239250b","Type":"ContainerStarted","Data":"cbd1a0e64bf1bf1a8bf3bd27d13c8bb27e3a508cc8be74b225039c485c6a6e7e"} Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.251314 4946 generic.go:334] "Generic (PLEG): container finished" podID="3fcf10a5-8a06-4542-9839-91e2881b5a5e" containerID="b2727b722e3a70afd501ccad98a0252f007abdf45aee111a177319c19bc76edf" exitCode=0 Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.251371 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l6sf5" event={"ID":"3fcf10a5-8a06-4542-9839-91e2881b5a5e","Type":"ContainerDied","Data":"b2727b722e3a70afd501ccad98a0252f007abdf45aee111a177319c19bc76edf"} Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.251405 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l6sf5" event={"ID":"3fcf10a5-8a06-4542-9839-91e2881b5a5e","Type":"ContainerStarted","Data":"3e560638d52f9384f04418e922057c7fd7abaae33aadc4528712ea933471999f"} Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.353375 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-g4h2m"] Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.354651 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-g4h2m" Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.357907 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.372411 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g4h2m"] Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.485128 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfb7d\" (UniqueName: \"kubernetes.io/projected/6380379a-3b27-4d4a-a8b9-9b8c72dcde44-kube-api-access-mfb7d\") pod \"redhat-operators-g4h2m\" (UID: \"6380379a-3b27-4d4a-a8b9-9b8c72dcde44\") " pod="openshift-marketplace/redhat-operators-g4h2m" Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.485190 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6380379a-3b27-4d4a-a8b9-9b8c72dcde44-utilities\") pod \"redhat-operators-g4h2m\" (UID: \"6380379a-3b27-4d4a-a8b9-9b8c72dcde44\") " pod="openshift-marketplace/redhat-operators-g4h2m" Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.485221 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6380379a-3b27-4d4a-a8b9-9b8c72dcde44-catalog-content\") pod \"redhat-operators-g4h2m\" (UID: \"6380379a-3b27-4d4a-a8b9-9b8c72dcde44\") " pod="openshift-marketplace/redhat-operators-g4h2m" Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.586360 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfb7d\" (UniqueName: \"kubernetes.io/projected/6380379a-3b27-4d4a-a8b9-9b8c72dcde44-kube-api-access-mfb7d\") pod \"redhat-operators-g4h2m\" (UID: \"6380379a-3b27-4d4a-a8b9-9b8c72dcde44\") " pod="openshift-marketplace/redhat-operators-g4h2m" Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.586412 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6380379a-3b27-4d4a-a8b9-9b8c72dcde44-utilities\") pod \"redhat-operators-g4h2m\" (UID: \"6380379a-3b27-4d4a-a8b9-9b8c72dcde44\") " pod="openshift-marketplace/redhat-operators-g4h2m" Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.586439 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6380379a-3b27-4d4a-a8b9-9b8c72dcde44-catalog-content\") pod \"redhat-operators-g4h2m\" (UID: \"6380379a-3b27-4d4a-a8b9-9b8c72dcde44\") " pod="openshift-marketplace/redhat-operators-g4h2m" Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.586943 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6380379a-3b27-4d4a-a8b9-9b8c72dcde44-catalog-content\") pod \"redhat-operators-g4h2m\" (UID: \"6380379a-3b27-4d4a-a8b9-9b8c72dcde44\") " pod="openshift-marketplace/redhat-operators-g4h2m" Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.587590 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6380379a-3b27-4d4a-a8b9-9b8c72dcde44-utilities\") pod \"redhat-operators-g4h2m\" (UID: \"6380379a-3b27-4d4a-a8b9-9b8c72dcde44\") " 
pod="openshift-marketplace/redhat-operators-g4h2m" Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.612351 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfb7d\" (UniqueName: \"kubernetes.io/projected/6380379a-3b27-4d4a-a8b9-9b8c72dcde44-kube-api-access-mfb7d\") pod \"redhat-operators-g4h2m\" (UID: \"6380379a-3b27-4d4a-a8b9-9b8c72dcde44\") " pod="openshift-marketplace/redhat-operators-g4h2m" Dec 04 15:08:33 crc kubenswrapper[4946]: I1204 15:08:33.669580 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g4h2m" Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.107071 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g4h2m"] Dec 04 15:08:34 crc kubenswrapper[4946]: W1204 15:08:34.114745 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6380379a_3b27_4d4a_a8b9_9b8c72dcde44.slice/crio-e355f80a7d786e81f4b829c1983ff2233ff5b2faa97abc532c8e7199ec524912 WatchSource:0}: Error finding container e355f80a7d786e81f4b829c1983ff2233ff5b2faa97abc532c8e7199ec524912: Status 404 returned error can't find the container with id e355f80a7d786e81f4b829c1983ff2233ff5b2faa97abc532c8e7199ec524912 Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.266496 4946 generic.go:334] "Generic (PLEG): container finished" podID="3fcf10a5-8a06-4542-9839-91e2881b5a5e" containerID="63e6ac2f473f65dd76e3a95644e80f7763ad3935b3d777c3330466255b9de868" exitCode=0 Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.266634 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l6sf5" event={"ID":"3fcf10a5-8a06-4542-9839-91e2881b5a5e","Type":"ContainerDied","Data":"63e6ac2f473f65dd76e3a95644e80f7763ad3935b3d777c3330466255b9de868"} Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.271530 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g4h2m" event={"ID":"6380379a-3b27-4d4a-a8b9-9b8c72dcde44","Type":"ContainerStarted","Data":"e355f80a7d786e81f4b829c1983ff2233ff5b2faa97abc532c8e7199ec524912"} Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.276990 4946 generic.go:334] "Generic (PLEG): container finished" podID="64dc1b8a-cfaa-435d-a093-fff34239250b" containerID="21151d7842a260b1a3425ce0844e972373f7334067766c78c45cd23c7be3f55a" exitCode=0 Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.277042 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mgklh" event={"ID":"64dc1b8a-cfaa-435d-a093-fff34239250b","Type":"ContainerDied","Data":"21151d7842a260b1a3425ce0844e972373f7334067766c78c45cd23c7be3f55a"} Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.348899 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8b6qp"] Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.358965 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8b6qp" Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.367654 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.380965 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8b6qp"] Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.398102 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9e9c2ed-d146-49d0-94b7-e244eff03321-catalog-content\") pod \"community-operators-8b6qp\" (UID: \"b9e9c2ed-d146-49d0-94b7-e244eff03321\") " pod="openshift-marketplace/community-operators-8b6qp" Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.398161 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9e9c2ed-d146-49d0-94b7-e244eff03321-utilities\") pod \"community-operators-8b6qp\" (UID: \"b9e9c2ed-d146-49d0-94b7-e244eff03321\") " pod="openshift-marketplace/community-operators-8b6qp" Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.398202 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5kmx\" (UniqueName: \"kubernetes.io/projected/b9e9c2ed-d146-49d0-94b7-e244eff03321-kube-api-access-w5kmx\") pod \"community-operators-8b6qp\" (UID: \"b9e9c2ed-d146-49d0-94b7-e244eff03321\") " pod="openshift-marketplace/community-operators-8b6qp" Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.499175 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9e9c2ed-d146-49d0-94b7-e244eff03321-catalog-content\") pod \"community-operators-8b6qp\" (UID: \"b9e9c2ed-d146-49d0-94b7-e244eff03321\") " pod="openshift-marketplace/community-operators-8b6qp" Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.499228 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9e9c2ed-d146-49d0-94b7-e244eff03321-utilities\") pod \"community-operators-8b6qp\" (UID: \"b9e9c2ed-d146-49d0-94b7-e244eff03321\") " pod="openshift-marketplace/community-operators-8b6qp" Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.499291 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5kmx\" (UniqueName: \"kubernetes.io/projected/b9e9c2ed-d146-49d0-94b7-e244eff03321-kube-api-access-w5kmx\") pod \"community-operators-8b6qp\" (UID: \"b9e9c2ed-d146-49d0-94b7-e244eff03321\") " pod="openshift-marketplace/community-operators-8b6qp" Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.499696 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9e9c2ed-d146-49d0-94b7-e244eff03321-catalog-content\") pod \"community-operators-8b6qp\" (UID: \"b9e9c2ed-d146-49d0-94b7-e244eff03321\") " pod="openshift-marketplace/community-operators-8b6qp" Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.499726 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9e9c2ed-d146-49d0-94b7-e244eff03321-utilities\") pod \"community-operators-8b6qp\" (UID: 
\"b9e9c2ed-d146-49d0-94b7-e244eff03321\") " pod="openshift-marketplace/community-operators-8b6qp" Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.521280 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5kmx\" (UniqueName: \"kubernetes.io/projected/b9e9c2ed-d146-49d0-94b7-e244eff03321-kube-api-access-w5kmx\") pod \"community-operators-8b6qp\" (UID: \"b9e9c2ed-d146-49d0-94b7-e244eff03321\") " pod="openshift-marketplace/community-operators-8b6qp" Dec 04 15:08:34 crc kubenswrapper[4946]: I1204 15:08:34.682934 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8b6qp" Dec 04 15:08:35 crc kubenswrapper[4946]: I1204 15:08:35.096548 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8b6qp"] Dec 04 15:08:35 crc kubenswrapper[4946]: W1204 15:08:35.104681 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9e9c2ed_d146_49d0_94b7_e244eff03321.slice/crio-dfd15966be6297da14fd34136374b52f478e535563caa77ddb1093701c9db9ea WatchSource:0}: Error finding container dfd15966be6297da14fd34136374b52f478e535563caa77ddb1093701c9db9ea: Status 404 returned error can't find the container with id dfd15966be6297da14fd34136374b52f478e535563caa77ddb1093701c9db9ea Dec 04 15:08:35 crc kubenswrapper[4946]: I1204 15:08:35.288146 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mgklh" event={"ID":"64dc1b8a-cfaa-435d-a093-fff34239250b","Type":"ContainerStarted","Data":"fd1c8131b785f7df4ceed513e7e0655866c152572cfa4a16f704b6bce5b80e09"} Dec 04 15:08:35 crc kubenswrapper[4946]: I1204 15:08:35.290270 4946 generic.go:334] "Generic (PLEG): container finished" podID="b9e9c2ed-d146-49d0-94b7-e244eff03321" containerID="91be4803d81b0a6ce4a4b42975111eb506609a2cc0c58cb96c9c1b399aece284" exitCode=0 Dec 04 15:08:35 crc kubenswrapper[4946]: I1204 15:08:35.290344 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8b6qp" event={"ID":"b9e9c2ed-d146-49d0-94b7-e244eff03321","Type":"ContainerDied","Data":"91be4803d81b0a6ce4a4b42975111eb506609a2cc0c58cb96c9c1b399aece284"} Dec 04 15:08:35 crc kubenswrapper[4946]: I1204 15:08:35.290408 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8b6qp" event={"ID":"b9e9c2ed-d146-49d0-94b7-e244eff03321","Type":"ContainerStarted","Data":"dfd15966be6297da14fd34136374b52f478e535563caa77ddb1093701c9db9ea"} Dec 04 15:08:35 crc kubenswrapper[4946]: I1204 15:08:35.293571 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l6sf5" event={"ID":"3fcf10a5-8a06-4542-9839-91e2881b5a5e","Type":"ContainerStarted","Data":"837bd62a27c153a920de1b911c25e1dac4784a0f9ee8e508959d780b25fdb8e1"} Dec 04 15:08:35 crc kubenswrapper[4946]: I1204 15:08:35.296442 4946 generic.go:334] "Generic (PLEG): container finished" podID="6380379a-3b27-4d4a-a8b9-9b8c72dcde44" containerID="b3ab195111de5507b5611ce60a72666422d2b1be4841fb5a5350ee918f640890" exitCode=0 Dec 04 15:08:35 crc kubenswrapper[4946]: I1204 15:08:35.296509 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g4h2m" event={"ID":"6380379a-3b27-4d4a-a8b9-9b8c72dcde44","Type":"ContainerDied","Data":"b3ab195111de5507b5611ce60a72666422d2b1be4841fb5a5350ee918f640890"} Dec 04 15:08:35 crc 
kubenswrapper[4946]: I1204 15:08:35.317613 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mgklh" podStartSLOduration=2.804041287 podStartE2EDuration="4.317583781s" podCreationTimestamp="2025-12-04 15:08:31 +0000 UTC" firstStartedPulling="2025-12-04 15:08:33.248314549 +0000 UTC m=+364.134358210" lastFinishedPulling="2025-12-04 15:08:34.761857063 +0000 UTC m=+365.647900704" observedRunningTime="2025-12-04 15:08:35.317303613 +0000 UTC m=+366.203347274" watchObservedRunningTime="2025-12-04 15:08:35.317583781 +0000 UTC m=+366.203627422" Dec 04 15:08:35 crc kubenswrapper[4946]: I1204 15:08:35.385529 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-l6sf5" podStartSLOduration=2.941712206 podStartE2EDuration="4.385505204s" podCreationTimestamp="2025-12-04 15:08:31 +0000 UTC" firstStartedPulling="2025-12-04 15:08:33.253850861 +0000 UTC m=+364.139894502" lastFinishedPulling="2025-12-04 15:08:34.697643859 +0000 UTC m=+365.583687500" observedRunningTime="2025-12-04 15:08:35.381856817 +0000 UTC m=+366.267900458" watchObservedRunningTime="2025-12-04 15:08:35.385505204 +0000 UTC m=+366.271548845" Dec 04 15:08:36 crc kubenswrapper[4946]: I1204 15:08:36.307193 4946 generic.go:334] "Generic (PLEG): container finished" podID="b9e9c2ed-d146-49d0-94b7-e244eff03321" containerID="c29ebdac56b744758b9d80d9041aff553363e6612037fe7f4e02fc17afd21d8d" exitCode=0 Dec 04 15:08:36 crc kubenswrapper[4946]: I1204 15:08:36.307357 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8b6qp" event={"ID":"b9e9c2ed-d146-49d0-94b7-e244eff03321","Type":"ContainerDied","Data":"c29ebdac56b744758b9d80d9041aff553363e6612037fe7f4e02fc17afd21d8d"} Dec 04 15:08:36 crc kubenswrapper[4946]: I1204 15:08:36.309515 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g4h2m" event={"ID":"6380379a-3b27-4d4a-a8b9-9b8c72dcde44","Type":"ContainerStarted","Data":"ceb68d6fd37585055dc60d0ee69088b52d54c0e53a32ed7025dc6452050e2a1b"} Dec 04 15:08:37 crc kubenswrapper[4946]: I1204 15:08:37.318462 4946 generic.go:334] "Generic (PLEG): container finished" podID="6380379a-3b27-4d4a-a8b9-9b8c72dcde44" containerID="ceb68d6fd37585055dc60d0ee69088b52d54c0e53a32ed7025dc6452050e2a1b" exitCode=0 Dec 04 15:08:37 crc kubenswrapper[4946]: I1204 15:08:37.318553 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g4h2m" event={"ID":"6380379a-3b27-4d4a-a8b9-9b8c72dcde44","Type":"ContainerDied","Data":"ceb68d6fd37585055dc60d0ee69088b52d54c0e53a32ed7025dc6452050e2a1b"} Dec 04 15:08:37 crc kubenswrapper[4946]: I1204 15:08:37.322401 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8b6qp" event={"ID":"b9e9c2ed-d146-49d0-94b7-e244eff03321","Type":"ContainerStarted","Data":"52d1771fb371935b01d8f5db48cf3f280cb608aac16064f929293b882068056d"} Dec 04 15:08:37 crc kubenswrapper[4946]: I1204 15:08:37.368502 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8b6qp" podStartSLOduration=1.887466334 podStartE2EDuration="3.368478743s" podCreationTimestamp="2025-12-04 15:08:34 +0000 UTC" firstStartedPulling="2025-12-04 15:08:35.291873037 +0000 UTC m=+366.177916678" lastFinishedPulling="2025-12-04 15:08:36.772885456 +0000 UTC m=+367.658929087" observedRunningTime="2025-12-04 
15:08:37.366794664 +0000 UTC m=+368.252838305" watchObservedRunningTime="2025-12-04 15:08:37.368478743 +0000 UTC m=+368.254522384" Dec 04 15:08:38 crc kubenswrapper[4946]: I1204 15:08:38.331073 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g4h2m" event={"ID":"6380379a-3b27-4d4a-a8b9-9b8c72dcde44","Type":"ContainerStarted","Data":"6f69e03a9fa32a5f877643bc50d69bc50fbe7e8f074a975c9d6459812d77a781"} Dec 04 15:08:38 crc kubenswrapper[4946]: I1204 15:08:38.359271 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-g4h2m" podStartSLOduration=2.929971741 podStartE2EDuration="5.359241607s" podCreationTimestamp="2025-12-04 15:08:33 +0000 UTC" firstStartedPulling="2025-12-04 15:08:35.298373097 +0000 UTC m=+366.184416738" lastFinishedPulling="2025-12-04 15:08:37.727642963 +0000 UTC m=+368.613686604" observedRunningTime="2025-12-04 15:08:38.354316993 +0000 UTC m=+369.240360634" watchObservedRunningTime="2025-12-04 15:08:38.359241607 +0000 UTC m=+369.245285258" Dec 04 15:08:41 crc kubenswrapper[4946]: I1204 15:08:41.869228 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mgklh" Dec 04 15:08:41 crc kubenswrapper[4946]: I1204 15:08:41.869525 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mgklh" Dec 04 15:08:41 crc kubenswrapper[4946]: I1204 15:08:41.918106 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mgklh" Dec 04 15:08:42 crc kubenswrapper[4946]: I1204 15:08:42.066401 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-l6sf5" Dec 04 15:08:42 crc kubenswrapper[4946]: I1204 15:08:42.066481 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-l6sf5" Dec 04 15:08:42 crc kubenswrapper[4946]: I1204 15:08:42.114104 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-l6sf5" Dec 04 15:08:42 crc kubenswrapper[4946]: I1204 15:08:42.389344 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-l6sf5" Dec 04 15:08:42 crc kubenswrapper[4946]: I1204 15:08:42.400075 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mgklh" Dec 04 15:08:43 crc kubenswrapper[4946]: I1204 15:08:43.670451 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-g4h2m" Dec 04 15:08:43 crc kubenswrapper[4946]: I1204 15:08:43.670547 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-g4h2m" Dec 04 15:08:43 crc kubenswrapper[4946]: I1204 15:08:43.712571 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-g4h2m" Dec 04 15:08:44 crc kubenswrapper[4946]: I1204 15:08:44.423980 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-g4h2m" Dec 04 15:08:44 crc kubenswrapper[4946]: I1204 15:08:44.683567 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8b6qp" Dec 04 15:08:44 crc 
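
The pod_startup_latency_tracker entries above report two durations: podStartE2EDuration (observed running time minus pod creation) and podStartSLOduration, which appears to be the same interval with the image-pull window (lastFinishedPulling − firstStartedPulling, taken from the monotonic m=+ readings) subtracted. A back-of-envelope Go check against the redhat-marketplace-mgklh entry, using only values copied from the log:

```go
// Check: podStartSLOduration == podStartE2EDuration - image-pull window,
// for the redhat-marketplace-mgklh startup entry. The pull window uses
// the monotonic (m=+...) offsets logged by the kubelet.
package main

import "fmt"

func main() {
	const (
		e2e                 = 4.317583781   // podStartE2EDuration, seconds
		firstStartedPulling = 364.134358210 // m=+ offset, seconds
		lastFinishedPulling = 365.647900704 // m=+ offset, seconds
		sloLogged           = 2.804041287   // podStartSLOduration, seconds
	)
	pull := lastFinishedPulling - firstStartedPulling
	slo := e2e - pull
	fmt.Printf("pull window: %.9fs\n", pull)                          // 1.513542494s
	fmt.Printf("e2e - pull:  %.9fs (logged %.9fs)\n", slo, sloLogged) // matches
}
```

The same rule explains the image-registry entry further down, where firstStartedPulling/lastFinishedPulling are the zero time (no pull happened) and SLO duration equals the E2E duration.
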
kubenswrapper[4946]: I1204 15:08:44.683636 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8b6qp" Dec 04 15:08:44 crc kubenswrapper[4946]: I1204 15:08:44.722749 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8b6qp" Dec 04 15:08:45 crc kubenswrapper[4946]: I1204 15:08:45.416345 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8b6qp" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.314737 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-8lk45"] Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.315655 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.341067 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-8lk45"] Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.408980 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-registry-certificates\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.409065 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.409099 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4vgk\" (UniqueName: \"kubernetes.io/projected/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-kube-api-access-x4vgk\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.409424 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.409515 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-registry-tls\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.409559 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-trusted-ca\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.409638 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.409779 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-bound-sa-token\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.437361 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.511698 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.511751 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-registry-tls\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.511768 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-trusted-ca\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.511797 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-bound-sa-token\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.511831 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-registry-certificates\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.511854 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.511872 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4vgk\" (UniqueName: \"kubernetes.io/projected/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-kube-api-access-x4vgk\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.512855 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.514007 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-registry-certificates\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.514041 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-trusted-ca\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.520783 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-registry-tls\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.521547 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.532272 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-bound-sa-token\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.533266 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4vgk\" (UniqueName: 
\"kubernetes.io/projected/a9bf94db-bb66-4011-a3d5-182cc6a0d93b-kube-api-access-x4vgk\") pod \"image-registry-66df7c8f76-8lk45\" (UID: \"a9bf94db-bb66-4011-a3d5-182cc6a0d93b\") " pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:49 crc kubenswrapper[4946]: I1204 15:08:49.640906 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:50 crc kubenswrapper[4946]: I1204 15:08:50.077143 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-8lk45"] Dec 04 15:08:50 crc kubenswrapper[4946]: I1204 15:08:50.403904 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" event={"ID":"a9bf94db-bb66-4011-a3d5-182cc6a0d93b","Type":"ContainerStarted","Data":"9c281f75f713a3cb28c4a14e15a3e43f60ce467e6027d7c6e19566ecf9560530"} Dec 04 15:08:52 crc kubenswrapper[4946]: I1204 15:08:52.478771 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:08:52 crc kubenswrapper[4946]: I1204 15:08:52.479246 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:08:55 crc kubenswrapper[4946]: I1204 15:08:55.441267 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" event={"ID":"a9bf94db-bb66-4011-a3d5-182cc6a0d93b","Type":"ContainerStarted","Data":"5e92e6f15db0a77e65f2e258958157ad1538ea13183e6650247d770ba8bed4b4"} Dec 04 15:08:55 crc kubenswrapper[4946]: I1204 15:08:55.442081 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:08:55 crc kubenswrapper[4946]: I1204 15:08:55.471978 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" podStartSLOduration=6.471956955 podStartE2EDuration="6.471956955s" podCreationTimestamp="2025-12-04 15:08:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:08:55.4666597 +0000 UTC m=+386.352703361" watchObservedRunningTime="2025-12-04 15:08:55.471956955 +0000 UTC m=+386.358000596" Dec 04 15:09:03 crc kubenswrapper[4946]: I1204 15:09:03.642787 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-84f6b5785b-j6wmh"] Dec 04 15:09:03 crc kubenswrapper[4946]: I1204 15:09:03.643863 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" podUID="99a0d72f-3fdf-45e4-bb50-c1855a37f6f4" containerName="controller-manager" containerID="cri-o://511cc2e2e1b6e8db32e4442f4d80187920cb9d4ab7983b479bacf802da99d75b" gracePeriod=30 Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.038915 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.152309 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-proxy-ca-bundles\") pod \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.152388 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-client-ca\") pod \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.152496 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-serving-cert\") pod \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.152550 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-config\") pod \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.152604 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6brp\" (UniqueName: \"kubernetes.io/projected/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-kube-api-access-l6brp\") pod \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\" (UID: \"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4\") " Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.153758 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "99a0d72f-3fdf-45e4-bb50-c1855a37f6f4" (UID: "99a0d72f-3fdf-45e4-bb50-c1855a37f6f4"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.153769 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-client-ca" (OuterVolumeSpecName: "client-ca") pod "99a0d72f-3fdf-45e4-bb50-c1855a37f6f4" (UID: "99a0d72f-3fdf-45e4-bb50-c1855a37f6f4"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.154404 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-config" (OuterVolumeSpecName: "config") pod "99a0d72f-3fdf-45e4-bb50-c1855a37f6f4" (UID: "99a0d72f-3fdf-45e4-bb50-c1855a37f6f4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.161922 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-kube-api-access-l6brp" (OuterVolumeSpecName: "kube-api-access-l6brp") pod "99a0d72f-3fdf-45e4-bb50-c1855a37f6f4" (UID: "99a0d72f-3fdf-45e4-bb50-c1855a37f6f4"). InnerVolumeSpecName "kube-api-access-l6brp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.160482 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "99a0d72f-3fdf-45e4-bb50-c1855a37f6f4" (UID: "99a0d72f-3fdf-45e4-bb50-c1855a37f6f4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.255141 4946 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.255206 4946 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-client-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.255221 4946 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.255234 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.255249 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6brp\" (UniqueName: \"kubernetes.io/projected/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4-kube-api-access-l6brp\") on node \"crc\" DevicePath \"\"" Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.501643 4946 generic.go:334] "Generic (PLEG): container finished" podID="99a0d72f-3fdf-45e4-bb50-c1855a37f6f4" containerID="511cc2e2e1b6e8db32e4442f4d80187920cb9d4ab7983b479bacf802da99d75b" exitCode=0 Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.501695 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" event={"ID":"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4","Type":"ContainerDied","Data":"511cc2e2e1b6e8db32e4442f4d80187920cb9d4ab7983b479bacf802da99d75b"} Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.501740 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" event={"ID":"99a0d72f-3fdf-45e4-bb50-c1855a37f6f4","Type":"ContainerDied","Data":"61a9ca4500f0a378574563e8e43aa2fd68fc3133c4e3860090133642e2d25d9f"} Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.501745 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-84f6b5785b-j6wmh" Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.501770 4946 scope.go:117] "RemoveContainer" containerID="511cc2e2e1b6e8db32e4442f4d80187920cb9d4ab7983b479bacf802da99d75b" Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.526942 4946 scope.go:117] "RemoveContainer" containerID="511cc2e2e1b6e8db32e4442f4d80187920cb9d4ab7983b479bacf802da99d75b" Dec 04 15:09:04 crc kubenswrapper[4946]: E1204 15:09:04.528339 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"511cc2e2e1b6e8db32e4442f4d80187920cb9d4ab7983b479bacf802da99d75b\": container with ID starting with 511cc2e2e1b6e8db32e4442f4d80187920cb9d4ab7983b479bacf802da99d75b not found: ID does not exist" containerID="511cc2e2e1b6e8db32e4442f4d80187920cb9d4ab7983b479bacf802da99d75b" Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.528384 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"511cc2e2e1b6e8db32e4442f4d80187920cb9d4ab7983b479bacf802da99d75b"} err="failed to get container status \"511cc2e2e1b6e8db32e4442f4d80187920cb9d4ab7983b479bacf802da99d75b\": rpc error: code = NotFound desc = could not find container \"511cc2e2e1b6e8db32e4442f4d80187920cb9d4ab7983b479bacf802da99d75b\": container with ID starting with 511cc2e2e1b6e8db32e4442f4d80187920cb9d4ab7983b479bacf802da99d75b not found: ID does not exist" Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.541316 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-84f6b5785b-j6wmh"] Dec 04 15:09:04 crc kubenswrapper[4946]: I1204 15:09:04.545684 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-84f6b5785b-j6wmh"] Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.028314 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7f649c9466-lt2jk"] Dec 04 15:09:05 crc kubenswrapper[4946]: E1204 15:09:05.028588 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99a0d72f-3fdf-45e4-bb50-c1855a37f6f4" containerName="controller-manager" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.028605 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="99a0d72f-3fdf-45e4-bb50-c1855a37f6f4" containerName="controller-manager" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.028728 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="99a0d72f-3fdf-45e4-bb50-c1855a37f6f4" containerName="controller-manager" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.029286 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.033804 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.034654 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.035250 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.035272 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.035878 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.037702 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.049912 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f649c9466-lt2jk"] Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.058476 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.068177 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e1dd5bd-9582-43c7-bdda-9829ef4acadc-config\") pod \"controller-manager-7f649c9466-lt2jk\" (UID: \"1e1dd5bd-9582-43c7-bdda-9829ef4acadc\") " pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.068290 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e1dd5bd-9582-43c7-bdda-9829ef4acadc-client-ca\") pod \"controller-manager-7f649c9466-lt2jk\" (UID: \"1e1dd5bd-9582-43c7-bdda-9829ef4acadc\") " pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.068324 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cjnr\" (UniqueName: \"kubernetes.io/projected/1e1dd5bd-9582-43c7-bdda-9829ef4acadc-kube-api-access-9cjnr\") pod \"controller-manager-7f649c9466-lt2jk\" (UID: \"1e1dd5bd-9582-43c7-bdda-9829ef4acadc\") " pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.068349 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e1dd5bd-9582-43c7-bdda-9829ef4acadc-serving-cert\") pod \"controller-manager-7f649c9466-lt2jk\" (UID: \"1e1dd5bd-9582-43c7-bdda-9829ef4acadc\") " pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.068441 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/1e1dd5bd-9582-43c7-bdda-9829ef4acadc-proxy-ca-bundles\") pod \"controller-manager-7f649c9466-lt2jk\" (UID: \"1e1dd5bd-9582-43c7-bdda-9829ef4acadc\") " pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.170139 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1e1dd5bd-9582-43c7-bdda-9829ef4acadc-proxy-ca-bundles\") pod \"controller-manager-7f649c9466-lt2jk\" (UID: \"1e1dd5bd-9582-43c7-bdda-9829ef4acadc\") " pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.170217 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e1dd5bd-9582-43c7-bdda-9829ef4acadc-config\") pod \"controller-manager-7f649c9466-lt2jk\" (UID: \"1e1dd5bd-9582-43c7-bdda-9829ef4acadc\") " pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.170279 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e1dd5bd-9582-43c7-bdda-9829ef4acadc-client-ca\") pod \"controller-manager-7f649c9466-lt2jk\" (UID: \"1e1dd5bd-9582-43c7-bdda-9829ef4acadc\") " pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.170302 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cjnr\" (UniqueName: \"kubernetes.io/projected/1e1dd5bd-9582-43c7-bdda-9829ef4acadc-kube-api-access-9cjnr\") pod \"controller-manager-7f649c9466-lt2jk\" (UID: \"1e1dd5bd-9582-43c7-bdda-9829ef4acadc\") " pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.170331 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e1dd5bd-9582-43c7-bdda-9829ef4acadc-serving-cert\") pod \"controller-manager-7f649c9466-lt2jk\" (UID: \"1e1dd5bd-9582-43c7-bdda-9829ef4acadc\") " pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.171620 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e1dd5bd-9582-43c7-bdda-9829ef4acadc-client-ca\") pod \"controller-manager-7f649c9466-lt2jk\" (UID: \"1e1dd5bd-9582-43c7-bdda-9829ef4acadc\") " pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.172197 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1e1dd5bd-9582-43c7-bdda-9829ef4acadc-proxy-ca-bundles\") pod \"controller-manager-7f649c9466-lt2jk\" (UID: \"1e1dd5bd-9582-43c7-bdda-9829ef4acadc\") " pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.172510 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e1dd5bd-9582-43c7-bdda-9829ef4acadc-config\") pod \"controller-manager-7f649c9466-lt2jk\" (UID: \"1e1dd5bd-9582-43c7-bdda-9829ef4acadc\") " 
pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.175664 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e1dd5bd-9582-43c7-bdda-9829ef4acadc-serving-cert\") pod \"controller-manager-7f649c9466-lt2jk\" (UID: \"1e1dd5bd-9582-43c7-bdda-9829ef4acadc\") " pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.187037 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cjnr\" (UniqueName: \"kubernetes.io/projected/1e1dd5bd-9582-43c7-bdda-9829ef4acadc-kube-api-access-9cjnr\") pod \"controller-manager-7f649c9466-lt2jk\" (UID: \"1e1dd5bd-9582-43c7-bdda-9829ef4acadc\") " pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.351774 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.465579 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99a0d72f-3fdf-45e4-bb50-c1855a37f6f4" path="/var/lib/kubelet/pods/99a0d72f-3fdf-45e4-bb50-c1855a37f6f4/volumes" Dec 04 15:09:05 crc kubenswrapper[4946]: I1204 15:09:05.600945 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f649c9466-lt2jk"] Dec 04 15:09:05 crc kubenswrapper[4946]: W1204 15:09:05.609756 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1e1dd5bd_9582_43c7_bdda_9829ef4acadc.slice/crio-6281b047e788bf6859d52002155512c8174b0acd380d5c9f99022ebd595245db WatchSource:0}: Error finding container 6281b047e788bf6859d52002155512c8174b0acd380d5c9f99022ebd595245db: Status 404 returned error can't find the container with id 6281b047e788bf6859d52002155512c8174b0acd380d5c9f99022ebd595245db Dec 04 15:09:06 crc kubenswrapper[4946]: I1204 15:09:06.524653 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" event={"ID":"1e1dd5bd-9582-43c7-bdda-9829ef4acadc","Type":"ContainerStarted","Data":"16bbe0042704d4094b6a550cb3023ec7d66a0822520f1813da9709fd573f3481"} Dec 04 15:09:06 crc kubenswrapper[4946]: I1204 15:09:06.524979 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" event={"ID":"1e1dd5bd-9582-43c7-bdda-9829ef4acadc","Type":"ContainerStarted","Data":"6281b047e788bf6859d52002155512c8174b0acd380d5c9f99022ebd595245db"} Dec 04 15:09:06 crc kubenswrapper[4946]: I1204 15:09:06.525008 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:06 crc kubenswrapper[4946]: I1204 15:09:06.529736 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" Dec 04 15:09:06 crc kubenswrapper[4946]: I1204 15:09:06.546166 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7f649c9466-lt2jk" podStartSLOduration=3.54613875 podStartE2EDuration="3.54613875s" podCreationTimestamp="2025-12-04 15:09:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:09:06.544176936 +0000 UTC m=+397.430220587" watchObservedRunningTime="2025-12-04 15:09:06.54613875 +0000 UTC m=+397.432182411" Dec 04 15:09:19 crc kubenswrapper[4946]: I1204 15:09:19.647328 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-8lk45" Dec 04 15:09:19 crc kubenswrapper[4946]: I1204 15:09:19.706827 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-56jhv"] Dec 04 15:09:22 crc kubenswrapper[4946]: I1204 15:09:22.479422 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:09:22 crc kubenswrapper[4946]: I1204 15:09:22.480005 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:09:22 crc kubenswrapper[4946]: I1204 15:09:22.480177 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:09:22 crc kubenswrapper[4946]: I1204 15:09:22.481784 4946 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"34fd78eefbc4faaf18027485e3a960a582716ccf43fd9f02cdf83bf0a757e5e8"} pod="openshift-machine-config-operator/machine-config-daemon-qhv79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 15:09:22 crc kubenswrapper[4946]: I1204 15:09:22.481923 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" containerID="cri-o://34fd78eefbc4faaf18027485e3a960a582716ccf43fd9f02cdf83bf0a757e5e8" gracePeriod=600 Dec 04 15:09:22 crc kubenswrapper[4946]: I1204 15:09:22.630645 4946 generic.go:334] "Generic (PLEG): container finished" podID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerID="34fd78eefbc4faaf18027485e3a960a582716ccf43fd9f02cdf83bf0a757e5e8" exitCode=0 Dec 04 15:09:22 crc kubenswrapper[4946]: I1204 15:09:22.630731 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerDied","Data":"34fd78eefbc4faaf18027485e3a960a582716ccf43fd9f02cdf83bf0a757e5e8"} Dec 04 15:09:22 crc kubenswrapper[4946]: I1204 15:09:22.630808 4946 scope.go:117] "RemoveContainer" containerID="615b18f8c516dcb6ca6e2d33dcc8177d49a61a9f2b9b08aa4618294d0c0aba13" Dec 04 15:09:23 crc kubenswrapper[4946]: I1204 15:09:23.638936 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"e87ab8d53f5aa9b89123a8ac882de8b8a67a0f850ea3ff58023f9e8d50c74ba2"} Dec 04 15:09:44 crc kubenswrapper[4946]: I1204 
15:09:44.760260 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" podUID="0ae46332-ca8f-4850-96bc-ca2d408b51d3" containerName="registry" containerID="cri-o://ca2c8118ade39561437598d00aff215f2505875cbb16d9ab8b23365263ecb18d" gracePeriod=30
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.174894 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-56jhv"
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.271345 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0ae46332-ca8f-4850-96bc-ca2d408b51d3-bound-sa-token\") pod \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") "
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.271462 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0ae46332-ca8f-4850-96bc-ca2d408b51d3-installation-pull-secrets\") pod \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") "
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.271747 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0ae46332-ca8f-4850-96bc-ca2d408b51d3-trusted-ca\") pod \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") "
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.271801 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0ae46332-ca8f-4850-96bc-ca2d408b51d3-registry-certificates\") pod \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") "
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.271920 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xqf6\" (UniqueName: \"kubernetes.io/projected/0ae46332-ca8f-4850-96bc-ca2d408b51d3-kube-api-access-7xqf6\") pod \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") "
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.273432 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ae46332-ca8f-4850-96bc-ca2d408b51d3-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "0ae46332-ca8f-4850-96bc-ca2d408b51d3" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.274260 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") "
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.274392 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0ae46332-ca8f-4850-96bc-ca2d408b51d3-registry-tls\") pod \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") "
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.274494 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0ae46332-ca8f-4850-96bc-ca2d408b51d3-ca-trust-extracted\") pod \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\" (UID: \"0ae46332-ca8f-4850-96bc-ca2d408b51d3\") "
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.276835 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ae46332-ca8f-4850-96bc-ca2d408b51d3-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "0ae46332-ca8f-4850-96bc-ca2d408b51d3" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.280578 4946 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0ae46332-ca8f-4850-96bc-ca2d408b51d3-trusted-ca\") on node \"crc\" DevicePath \"\""
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.280728 4946 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0ae46332-ca8f-4850-96bc-ca2d408b51d3-registry-certificates\") on node \"crc\" DevicePath \"\""
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.286146 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ae46332-ca8f-4850-96bc-ca2d408b51d3-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "0ae46332-ca8f-4850-96bc-ca2d408b51d3" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.289206 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ae46332-ca8f-4850-96bc-ca2d408b51d3-kube-api-access-7xqf6" (OuterVolumeSpecName: "kube-api-access-7xqf6") pod "0ae46332-ca8f-4850-96bc-ca2d408b51d3" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3"). InnerVolumeSpecName "kube-api-access-7xqf6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.291184 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ae46332-ca8f-4850-96bc-ca2d408b51d3-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "0ae46332-ca8f-4850-96bc-ca2d408b51d3" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.291647 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ae46332-ca8f-4850-96bc-ca2d408b51d3-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "0ae46332-ca8f-4850-96bc-ca2d408b51d3" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.299048 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "0ae46332-ca8f-4850-96bc-ca2d408b51d3" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.303566 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ae46332-ca8f-4850-96bc-ca2d408b51d3-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "0ae46332-ca8f-4850-96bc-ca2d408b51d3" (UID: "0ae46332-ca8f-4850-96bc-ca2d408b51d3"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.382385 4946 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0ae46332-ca8f-4850-96bc-ca2d408b51d3-bound-sa-token\") on node \"crc\" DevicePath \"\""
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.382451 4946 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0ae46332-ca8f-4850-96bc-ca2d408b51d3-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.382467 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xqf6\" (UniqueName: \"kubernetes.io/projected/0ae46332-ca8f-4850-96bc-ca2d408b51d3-kube-api-access-7xqf6\") on node \"crc\" DevicePath \"\""
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.382481 4946 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0ae46332-ca8f-4850-96bc-ca2d408b51d3-registry-tls\") on node \"crc\" DevicePath \"\""
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.382493 4946 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0ae46332-ca8f-4850-96bc-ca2d408b51d3-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.785877 4946 generic.go:334] "Generic (PLEG): container finished" podID="0ae46332-ca8f-4850-96bc-ca2d408b51d3" containerID="ca2c8118ade39561437598d00aff215f2505875cbb16d9ab8b23365263ecb18d" exitCode=0
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.785949 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" event={"ID":"0ae46332-ca8f-4850-96bc-ca2d408b51d3","Type":"ContainerDied","Data":"ca2c8118ade39561437598d00aff215f2505875cbb16d9ab8b23365263ecb18d"}
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.786000 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-56jhv" event={"ID":"0ae46332-ca8f-4850-96bc-ca2d408b51d3","Type":"ContainerDied","Data":"1b66f669918db115e91c3af26b1c66b1830a8b03f57d312d183cb01ed4c13950"}
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.786042 4946 scope.go:117] "RemoveContainer" containerID="ca2c8118ade39561437598d00aff215f2505875cbb16d9ab8b23365263ecb18d"
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.786075 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-56jhv"
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.814866 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-56jhv"]
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.822371 4946 scope.go:117] "RemoveContainer" containerID="ca2c8118ade39561437598d00aff215f2505875cbb16d9ab8b23365263ecb18d"
Dec 04 15:09:45 crc kubenswrapper[4946]: E1204 15:09:45.823240 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca2c8118ade39561437598d00aff215f2505875cbb16d9ab8b23365263ecb18d\": container with ID starting with ca2c8118ade39561437598d00aff215f2505875cbb16d9ab8b23365263ecb18d not found: ID does not exist" containerID="ca2c8118ade39561437598d00aff215f2505875cbb16d9ab8b23365263ecb18d"
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.823280 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-56jhv"]
Dec 04 15:09:45 crc kubenswrapper[4946]: I1204 15:09:45.823304 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca2c8118ade39561437598d00aff215f2505875cbb16d9ab8b23365263ecb18d"} err="failed to get container status \"ca2c8118ade39561437598d00aff215f2505875cbb16d9ab8b23365263ecb18d\": rpc error: code = NotFound desc = could not find container \"ca2c8118ade39561437598d00aff215f2505875cbb16d9ab8b23365263ecb18d\": container with ID starting with ca2c8118ade39561437598d00aff215f2505875cbb16d9ab8b23365263ecb18d not found: ID does not exist"
Dec 04 15:09:47 crc kubenswrapper[4946]: I1204 15:09:47.460858 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ae46332-ca8f-4850-96bc-ca2d408b51d3" path="/var/lib/kubelet/pods/0ae46332-ca8f-4850-96bc-ca2d408b51d3/volumes"
Dec 04 15:11:29 crc kubenswrapper[4946]: I1204 15:11:29.613030 4946 scope.go:117] "RemoveContainer" containerID="01e4936e75999e2d560020435cb98c8f4cdc538ba011f2212c658784b5ffd270"
Dec 04 15:11:29 crc kubenswrapper[4946]: I1204 15:11:29.635380 4946 scope.go:117] "RemoveContainer" containerID="60edc69588b32fe369e4a8adcd1d3ac406044918c6dee59389db4d7ecbcb0257"
Dec 04 15:11:29 crc kubenswrapper[4946]: I1204 15:11:29.657751 4946 scope.go:117] "RemoveContainer" containerID="b7ae1d683095b1730459bcd981d11898ca955a57b47fa8612f41f2163b250e24"
Dec 04 15:11:29 crc kubenswrapper[4946]: I1204 15:11:29.681662 4946 scope.go:117] "RemoveContainer" containerID="dd5749d0eba0fadc137ed3019b517bc7f496580ddde20187785b7ceef42904cf"
Dec 04 15:11:29 crc kubenswrapper[4946]: I1204 15:11:29.698459 4946 scope.go:117] "RemoveContainer" containerID="2fa459994684bb66d3cf1cb5cb34ddddccc157e449962144dc5b25f9178863ef"
Dec 04 15:11:52 crc kubenswrapper[4946]: I1204 15:11:52.478957 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 15:11:52 crc kubenswrapper[4946]: I1204 15:11:52.481109 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 15:12:22 crc kubenswrapper[4946]: I1204 15:12:22.479219 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 15:12:22 crc kubenswrapper[4946]: I1204 15:12:22.480156 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 15:12:29 crc kubenswrapper[4946]: I1204 15:12:29.733953 4946 scope.go:117] "RemoveContainer" containerID="412175715a6662718e4875aba9eba1b3c486684064a2e821e193eced06a5d3e7"
Dec 04 15:12:29 crc kubenswrapper[4946]: I1204 15:12:29.764809 4946 scope.go:117] "RemoveContainer" containerID="91958b72d326f4106921c520f0b316281727518bc5d4704ba6e138d61acd2335"
Dec 04 15:12:29 crc kubenswrapper[4946]: I1204 15:12:29.788996 4946 scope.go:117] "RemoveContainer" containerID="2b442f0743d9e4659f97cb863caee7b13cb7e257a4d49f89f7d6063feeb5fbc0"
Dec 04 15:12:52 crc kubenswrapper[4946]: I1204 15:12:52.479138 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 15:12:52 crc kubenswrapper[4946]: I1204 15:12:52.480010 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 15:12:52 crc kubenswrapper[4946]: I1204 15:12:52.480070 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-qhv79"
Dec 04 15:12:53 crc kubenswrapper[4946]: I1204 15:12:53.146625 4946 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e87ab8d53f5aa9b89123a8ac882de8b8a67a0f850ea3ff58023f9e8d50c74ba2"} pod="openshift-machine-config-operator/machine-config-daemon-qhv79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 04 15:12:53 crc kubenswrapper[4946]: I1204 15:12:53.147098 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" containerID="cri-o://e87ab8d53f5aa9b89123a8ac882de8b8a67a0f850ea3ff58023f9e8d50c74ba2" gracePeriod=600
Dec 04 15:12:54 crc kubenswrapper[4946]: I1204 15:12:54.157746 4946 generic.go:334] "Generic (PLEG): container finished" podID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerID="e87ab8d53f5aa9b89123a8ac882de8b8a67a0f850ea3ff58023f9e8d50c74ba2" exitCode=0
Dec 04 15:12:54 crc kubenswrapper[4946]: I1204 15:12:54.157806 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerDied","Data":"e87ab8d53f5aa9b89123a8ac882de8b8a67a0f850ea3ff58023f9e8d50c74ba2"}
Dec 04 15:12:54 crc kubenswrapper[4946]: I1204 15:12:54.158106 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"4a5d66e82f6e47d86ff02c63e947477c5c01ef6d0d42318658f454eca8014377"}
Dec 04 15:12:54 crc kubenswrapper[4946]: I1204 15:12:54.158146 4946 scope.go:117] "RemoveContainer" containerID="34fd78eefbc4faaf18027485e3a960a582716ccf43fd9f02cdf83bf0a757e5e8"
Dec 04 15:13:29 crc kubenswrapper[4946]: I1204 15:13:29.824384 4946 scope.go:117] "RemoveContainer" containerID="8a1eaff474378f6984f2824129d2075fcd592707a9632c8751552b805e03a97a"
Dec 04 15:13:29 crc kubenswrapper[4946]: I1204 15:13:29.859616 4946 scope.go:117] "RemoveContainer" containerID="f13e1ed4d91b07976e087c3006e87b65ea0cb094f53b49799c938467c66bca8f"
Dec 04 15:13:29 crc kubenswrapper[4946]: I1204 15:13:29.877409 4946 scope.go:117] "RemoveContainer" containerID="8ff5d0de19a536206b38267bfa2e1cf79b4b48c7f8f8c48050e119d0b8e3d59a"
Dec 04 15:13:29 crc kubenswrapper[4946]: I1204 15:13:29.893743 4946 scope.go:117] "RemoveContainer" containerID="1268cd3144473fc88da36fc8555082e9b8c28d645ba1bf35b453fd93dc5825e2"
Dec 04 15:13:29 crc kubenswrapper[4946]: I1204 15:13:29.912436 4946 scope.go:117] "RemoveContainer" containerID="e992c766db9d9d20751838d21dac482aa691c4f5bd717b1a7a4ebe0bf98b7d39"
Dec 04 15:14:16 crc kubenswrapper[4946]: I1204 15:14:16.048131 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg"]
Dec 04 15:14:16 crc kubenswrapper[4946]: E1204 15:14:16.049305 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ae46332-ca8f-4850-96bc-ca2d408b51d3" containerName="registry"
Dec 04 15:14:16 crc kubenswrapper[4946]: I1204 15:14:16.049322 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ae46332-ca8f-4850-96bc-ca2d408b51d3" containerName="registry"
Dec 04 15:14:16 crc kubenswrapper[4946]: I1204 15:14:16.049430 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ae46332-ca8f-4850-96bc-ca2d408b51d3" containerName="registry"
Dec 04 15:14:16 crc kubenswrapper[4946]: I1204 15:14:16.050385 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg"
Dec 04 15:14:16 crc kubenswrapper[4946]: I1204 15:14:16.053919 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 04 15:14:16 crc kubenswrapper[4946]: I1204 15:14:16.070931 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg"]
Dec 04 15:14:16 crc kubenswrapper[4946]: I1204 15:14:16.169473 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cdl9\" (UniqueName: \"kubernetes.io/projected/e4caec8c-f2e0-48dd-8138-6bccb6fafb86-kube-api-access-8cdl9\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg\" (UID: \"e4caec8c-f2e0-48dd-8138-6bccb6fafb86\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg"
Dec 04 15:14:16 crc kubenswrapper[4946]: I1204 15:14:16.169539 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e4caec8c-f2e0-48dd-8138-6bccb6fafb86-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg\" (UID: \"e4caec8c-f2e0-48dd-8138-6bccb6fafb86\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg"
Dec 04 15:14:16 crc kubenswrapper[4946]: I1204 15:14:16.169575 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e4caec8c-f2e0-48dd-8138-6bccb6fafb86-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg\" (UID: \"e4caec8c-f2e0-48dd-8138-6bccb6fafb86\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg"
Dec 04 15:14:16 crc kubenswrapper[4946]: I1204 15:14:16.271230 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cdl9\" (UniqueName: \"kubernetes.io/projected/e4caec8c-f2e0-48dd-8138-6bccb6fafb86-kube-api-access-8cdl9\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg\" (UID: \"e4caec8c-f2e0-48dd-8138-6bccb6fafb86\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg"
Dec 04 15:14:16 crc kubenswrapper[4946]: I1204 15:14:16.271295 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e4caec8c-f2e0-48dd-8138-6bccb6fafb86-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg\" (UID: \"e4caec8c-f2e0-48dd-8138-6bccb6fafb86\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg"
Dec 04 15:14:16 crc kubenswrapper[4946]: I1204 15:14:16.271320 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e4caec8c-f2e0-48dd-8138-6bccb6fafb86-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg\" (UID: \"e4caec8c-f2e0-48dd-8138-6bccb6fafb86\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg"
Dec 04 15:14:16 crc kubenswrapper[4946]: I1204 15:14:16.271993 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e4caec8c-f2e0-48dd-8138-6bccb6fafb86-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg\" (UID: \"e4caec8c-f2e0-48dd-8138-6bccb6fafb86\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg"
Dec 04 15:14:16 crc kubenswrapper[4946]: I1204 15:14:16.272274 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e4caec8c-f2e0-48dd-8138-6bccb6fafb86-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg\" (UID: \"e4caec8c-f2e0-48dd-8138-6bccb6fafb86\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg"
Dec 04 15:14:16 crc kubenswrapper[4946]: I1204 15:14:16.300596 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cdl9\" (UniqueName: \"kubernetes.io/projected/e4caec8c-f2e0-48dd-8138-6bccb6fafb86-kube-api-access-8cdl9\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg\" (UID: \"e4caec8c-f2e0-48dd-8138-6bccb6fafb86\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg"
Dec 04 15:14:16 crc kubenswrapper[4946]: I1204 15:14:16.370220 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg"
Dec 04 15:14:16 crc kubenswrapper[4946]: I1204 15:14:16.598797 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg"]
Dec 04 15:14:16 crc kubenswrapper[4946]: I1204 15:14:16.659870 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg" event={"ID":"e4caec8c-f2e0-48dd-8138-6bccb6fafb86","Type":"ContainerStarted","Data":"a05edc1e8bfc809b0e197e4c7da9916925419cd2978d98d04dbcb05c320497a6"}
Dec 04 15:14:17 crc kubenswrapper[4946]: I1204 15:14:17.666797 4946 generic.go:334] "Generic (PLEG): container finished" podID="e4caec8c-f2e0-48dd-8138-6bccb6fafb86" containerID="6832f1fe63de7b2c79c173a2bc6b697a965ffe1452c0da3960e81901b94aeba6" exitCode=0
Dec 04 15:14:17 crc kubenswrapper[4946]: I1204 15:14:17.666851 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg" event={"ID":"e4caec8c-f2e0-48dd-8138-6bccb6fafb86","Type":"ContainerDied","Data":"6832f1fe63de7b2c79c173a2bc6b697a965ffe1452c0da3960e81901b94aeba6"}
Dec 04 15:14:17 crc kubenswrapper[4946]: I1204 15:14:17.669064 4946 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 04 15:14:19 crc kubenswrapper[4946]: I1204 15:14:19.694469 4946 generic.go:334] "Generic (PLEG): container finished" podID="e4caec8c-f2e0-48dd-8138-6bccb6fafb86" containerID="b7681f76dfb41d41dc930eafc1056d44d4c7575f7b1790b2474efca57f4dd4cd" exitCode=0
Dec 04 15:14:19 crc kubenswrapper[4946]: I1204 15:14:19.694546 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg" event={"ID":"e4caec8c-f2e0-48dd-8138-6bccb6fafb86","Type":"ContainerDied","Data":"b7681f76dfb41d41dc930eafc1056d44d4c7575f7b1790b2474efca57f4dd4cd"}
Dec 04 15:14:20 crc kubenswrapper[4946]: I1204 15:14:20.703957 4946 generic.go:334] "Generic (PLEG): container finished" podID="e4caec8c-f2e0-48dd-8138-6bccb6fafb86" containerID="40d620f16f17cd1fbbb155bbf79dd04dd8b74c0be1c7f2d63e88ec42bcb836cf" exitCode=0
Dec 04 15:14:20 crc kubenswrapper[4946]: I1204 15:14:20.704057 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg" event={"ID":"e4caec8c-f2e0-48dd-8138-6bccb6fafb86","Type":"ContainerDied","Data":"40d620f16f17cd1fbbb155bbf79dd04dd8b74c0be1c7f2d63e88ec42bcb836cf"}
Dec 04 15:14:21 crc kubenswrapper[4946]: I1204 15:14:21.965807 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg"
Dec 04 15:14:22 crc kubenswrapper[4946]: I1204 15:14:22.156982 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e4caec8c-f2e0-48dd-8138-6bccb6fafb86-util\") pod \"e4caec8c-f2e0-48dd-8138-6bccb6fafb86\" (UID: \"e4caec8c-f2e0-48dd-8138-6bccb6fafb86\") "
Dec 04 15:14:22 crc kubenswrapper[4946]: I1204 15:14:22.157283 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e4caec8c-f2e0-48dd-8138-6bccb6fafb86-bundle\") pod \"e4caec8c-f2e0-48dd-8138-6bccb6fafb86\" (UID: \"e4caec8c-f2e0-48dd-8138-6bccb6fafb86\") "
Dec 04 15:14:22 crc kubenswrapper[4946]: I1204 15:14:22.157325 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cdl9\" (UniqueName: \"kubernetes.io/projected/e4caec8c-f2e0-48dd-8138-6bccb6fafb86-kube-api-access-8cdl9\") pod \"e4caec8c-f2e0-48dd-8138-6bccb6fafb86\" (UID: \"e4caec8c-f2e0-48dd-8138-6bccb6fafb86\") "
Dec 04 15:14:22 crc kubenswrapper[4946]: I1204 15:14:22.159333 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4caec8c-f2e0-48dd-8138-6bccb6fafb86-bundle" (OuterVolumeSpecName: "bundle") pod "e4caec8c-f2e0-48dd-8138-6bccb6fafb86" (UID: "e4caec8c-f2e0-48dd-8138-6bccb6fafb86"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 15:14:22 crc kubenswrapper[4946]: I1204 15:14:22.184859 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4caec8c-f2e0-48dd-8138-6bccb6fafb86-kube-api-access-8cdl9" (OuterVolumeSpecName: "kube-api-access-8cdl9") pod "e4caec8c-f2e0-48dd-8138-6bccb6fafb86" (UID: "e4caec8c-f2e0-48dd-8138-6bccb6fafb86"). InnerVolumeSpecName "kube-api-access-8cdl9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:14:22 crc kubenswrapper[4946]: I1204 15:14:22.207347 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4caec8c-f2e0-48dd-8138-6bccb6fafb86-util" (OuterVolumeSpecName: "util") pod "e4caec8c-f2e0-48dd-8138-6bccb6fafb86" (UID: "e4caec8c-f2e0-48dd-8138-6bccb6fafb86"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 15:14:22 crc kubenswrapper[4946]: I1204 15:14:22.259319 4946 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e4caec8c-f2e0-48dd-8138-6bccb6fafb86-bundle\") on node \"crc\" DevicePath \"\""
Dec 04 15:14:22 crc kubenswrapper[4946]: I1204 15:14:22.259376 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cdl9\" (UniqueName: \"kubernetes.io/projected/e4caec8c-f2e0-48dd-8138-6bccb6fafb86-kube-api-access-8cdl9\") on node \"crc\" DevicePath \"\""
Dec 04 15:14:22 crc kubenswrapper[4946]: I1204 15:14:22.259391 4946 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e4caec8c-f2e0-48dd-8138-6bccb6fafb86-util\") on node \"crc\" DevicePath \"\""
Dec 04 15:14:22 crc kubenswrapper[4946]: I1204 15:14:22.720626 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg" event={"ID":"e4caec8c-f2e0-48dd-8138-6bccb6fafb86","Type":"ContainerDied","Data":"a05edc1e8bfc809b0e197e4c7da9916925419cd2978d98d04dbcb05c320497a6"}
Dec 04 15:14:22 crc kubenswrapper[4946]: I1204 15:14:22.720707 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a05edc1e8bfc809b0e197e4c7da9916925419cd2978d98d04dbcb05c320497a6"
Dec 04 15:14:22 crc kubenswrapper[4946]: I1204 15:14:22.720728 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg"
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.104986 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-w598m"]
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.105865 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovn-controller" containerID="cri-o://48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359" gracePeriod=30
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.105979 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="sbdb" containerID="cri-o://94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631" gracePeriod=30
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.106076 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07" gracePeriod=30
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.106180 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="nbdb" containerID="cri-o://ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531" gracePeriod=30
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.106217 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="kube-rbac-proxy-node" containerID="cri-o://e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8" gracePeriod=30
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.106104 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovn-acl-logging" containerID="cri-o://8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b" gracePeriod=30
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.106181 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="northd" containerID="cri-o://a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d" gracePeriod=30
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.158789 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovnkube-controller" containerID="cri-o://7a50aab186604f5678ccff903749ddd74758ee16d496f773128aeb0af53f61bd" gracePeriod=30
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.775727 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-fjmh5_f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09/kube-multus/2.log"
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.776828 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-fjmh5_f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09/kube-multus/1.log"
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.776891 4946 generic.go:334] "Generic (PLEG): container finished" podID="f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09" containerID="04157dee9b66b9c96469e5f0cdf517501a2e2855eb90759879ca3ded4097554c" exitCode=2
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.776977 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-fjmh5" event={"ID":"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09","Type":"ContainerDied","Data":"04157dee9b66b9c96469e5f0cdf517501a2e2855eb90759879ca3ded4097554c"}
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.777021 4946 scope.go:117] "RemoveContainer" containerID="482f45a4b06addcfe6d528f3e9cb8e4d27938059721fc7a063bb0deb4dd29d96"
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.777613 4946 scope.go:117] "RemoveContainer" containerID="04157dee9b66b9c96469e5f0cdf517501a2e2855eb90759879ca3ded4097554c"
Dec 04 15:14:28 crc kubenswrapper[4946]: E1204 15:14:28.777789 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-fjmh5_openshift-multus(f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09)\"" pod="openshift-multus/multus-fjmh5" podUID="f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09"
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.781773 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovnkube-controller/3.log"
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.782557 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/1.log"
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.799107 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/0.log"
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.800549 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-controller/0.log"
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.804916 4946 generic.go:334] "Generic (PLEG): container finished" podID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerID="7a50aab186604f5678ccff903749ddd74758ee16d496f773128aeb0af53f61bd" exitCode=0
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.804960 4946 generic.go:334] "Generic (PLEG): container finished" podID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerID="8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b" exitCode=143
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.804973 4946 generic.go:334] "Generic (PLEG): container finished" podID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerID="94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631" exitCode=0
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.804983 4946 generic.go:334] "Generic (PLEG): container finished" podID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerID="ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531" exitCode=0
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.804993 4946 generic.go:334] "Generic (PLEG): container finished" podID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerID="a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d" exitCode=0
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.805004 4946 generic.go:334] "Generic (PLEG): container finished" podID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerID="691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07" exitCode=0
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.805015 4946 generic.go:334] "Generic (PLEG): container finished" podID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerID="e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8" exitCode=0
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.805025 4946 generic.go:334] "Generic (PLEG): container finished" podID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerID="48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359" exitCode=143
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.805490 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerDied","Data":"7a50aab186604f5678ccff903749ddd74758ee16d496f773128aeb0af53f61bd"}
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.805625 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerDied","Data":"8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b"}
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.805693 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerDied","Data":"94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631"}
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.805750 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerDied","Data":"ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531"}
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.805804 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerDied","Data":"a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d"}
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.805857 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerDied","Data":"691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07"}
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.805909 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerDied","Data":"e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8"}
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.805980 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerDied","Data":"48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359"}
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.822008 4946 scope.go:117] "RemoveContainer" containerID="0530226d9da096a119c6b86953806212f39dab6d7d4a9b694acc5208701bd3c1"
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.874841 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/1.log"
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.882344 4946 scope.go:117] "RemoveContainer" containerID="bd5a23fdf26dcba4dfad33db15209d473ddeb873334547a9d0b6846a2150de39"
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.886594 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-controller/0.log"
Dec 04 15:14:28 crc kubenswrapper[4946]: I1204 15:14:28.887256 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-w598m"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.040793 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-s7shz"]
Dec 04 15:14:29 crc kubenswrapper[4946]: E1204 15:14:29.041009 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="sbdb"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041022 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="sbdb"
Dec 04 15:14:29 crc kubenswrapper[4946]: E1204 15:14:29.041034 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovn-acl-logging"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041042 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovn-acl-logging"
Dec 04 15:14:29 crc kubenswrapper[4946]: E1204 15:14:29.041049 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4caec8c-f2e0-48dd-8138-6bccb6fafb86" containerName="extract"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041055 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4caec8c-f2e0-48dd-8138-6bccb6fafb86" containerName="extract"
Dec 04 15:14:29 crc kubenswrapper[4946]: E1204 15:14:29.041064 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovnkube-controller"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041070 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovnkube-controller"
Dec 04 15:14:29 crc kubenswrapper[4946]: E1204 15:14:29.041076 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovnkube-controller"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041083 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovnkube-controller"
Dec 04 15:14:29 crc kubenswrapper[4946]: E1204 15:14:29.041092 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovn-controller"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041099 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovn-controller"
Dec 04 15:14:29 crc kubenswrapper[4946]: E1204 15:14:29.041109 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="kube-rbac-proxy-node"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041129 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="kube-rbac-proxy-node"
Dec 04 15:14:29 crc kubenswrapper[4946]: E1204 15:14:29.041136 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="northd"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041142 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="northd"
Dec 04 15:14:29 crc kubenswrapper[4946]: E1204 15:14:29.041151 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4caec8c-f2e0-48dd-8138-6bccb6fafb86" containerName="util"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041158 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4caec8c-f2e0-48dd-8138-6bccb6fafb86" containerName="util"
Dec 04 15:14:29 crc kubenswrapper[4946]: E1204 15:14:29.041166 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovn-acl-logging"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041172 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovn-acl-logging"
Dec 04 15:14:29 crc kubenswrapper[4946]: E1204 15:14:29.041181 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="kube-rbac-proxy-ovn-metrics"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041187 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="kube-rbac-proxy-ovn-metrics"
Dec 04 15:14:29 crc kubenswrapper[4946]: E1204 15:14:29.041196 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="nbdb"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041201 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="nbdb"
Dec 04 15:14:29 crc kubenswrapper[4946]: E1204 15:14:29.041208 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovnkube-controller"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041214 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovnkube-controller"
Dec 04 15:14:29 crc kubenswrapper[4946]: E1204 15:14:29.041223 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="kubecfg-setup"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041229 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="kubecfg-setup"
Dec 04 15:14:29 crc kubenswrapper[4946]: E1204 15:14:29.041238 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4caec8c-f2e0-48dd-8138-6bccb6fafb86" containerName="pull"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041243 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4caec8c-f2e0-48dd-8138-6bccb6fafb86" containerName="pull"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041322 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovn-controller"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041334 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="kube-rbac-proxy-node"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041342 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovn-acl-logging"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041350 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="nbdb"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041359 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="northd"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041365 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="sbdb"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041373 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovnkube-controller"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041379 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovnkube-controller"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041386 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovnkube-controller"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041391 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="kube-rbac-proxy-ovn-metrics"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041398 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovnkube-controller"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041406 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovn-acl-logging"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041414 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4caec8c-f2e0-48dd-8138-6bccb6fafb86" containerName="extract"
Dec 04 15:14:29 crc kubenswrapper[4946]: E1204 15:14:29.041495 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovnkube-controller"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041502 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovnkube-controller"
Dec 04 15:14:29 crc kubenswrapper[4946]: E1204 15:14:29.041515 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovnkube-controller"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041520 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovnkube-controller"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.041605 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" containerName="ovnkube-controller"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.043174 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz"
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.059812 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-kubelet\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.059874 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-etc-openvswitch\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.059915 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.059944 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-ovn-node-metrics-cert\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.059972 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.059978 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-env-overrides\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060067 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8fd42\" (UniqueName: \"kubernetes.io/projected/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-kube-api-access-8fd42\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060090 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-cni-bin\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060144 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-run-netns\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060171 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-run-openvswitch\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060214 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-slash\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060249 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-var-lib-openvswitch\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060279 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-run-ovn-kubernetes\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060298 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-log-socket\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060318 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-systemd-units\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060320 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060387 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-ovnkube-script-lib\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060408 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060413 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-cni-netd\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060411 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060487 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-node-log\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060497 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060474 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060495 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060544 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060553 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060520 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060578 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-log-socket" (OuterVolumeSpecName: "log-socket") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060592 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-slash" (OuterVolumeSpecName: "host-slash") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060615 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-node-log" (OuterVolumeSpecName: "node-log") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060624 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-ovnkube-config\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060664 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-run-systemd\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060699 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-run-ovn\") pod \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\" (UID: \"3537c3df-cdbc-4e1c-aee1-f2d942207a5a\") "
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060626 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060962 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.060976 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.061237 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.061701 4946 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.061725 4946 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-log-socket\") on node \"crc\" DevicePath \"\""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.061737 4946 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-systemd-units\") on node \"crc\" DevicePath \"\""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.061748 4946 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.061760 4946 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-cni-netd\") on node \"crc\" DevicePath \"\""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.061770 4946 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-node-log\") on node \"crc\" DevicePath \"\""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.061782 4946 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.061819 4946 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-ovnkube-config\") on node \"crc\" DevicePath \"\""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.061831 4946 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-run-ovn\") on node \"crc\" DevicePath \"\""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.061842 4946 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-kubelet\") on node \"crc\" DevicePath \"\""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.061856 4946 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-etc-openvswitch\") on node \"crc\" DevicePath \"\""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.061869 4946 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-env-overrides\") on node \"crc\" DevicePath \"\""
Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.061881 4946 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-cni-bin\") on node \"crc\"
DevicePath \"\"" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.061892 4946 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.061903 4946 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.061914 4946 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-host-slash\") on node \"crc\" DevicePath \"\"" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.061923 4946 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.078694 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-kube-api-access-8fd42" (OuterVolumeSpecName: "kube-api-access-8fd42") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "kube-api-access-8fd42". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.079844 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.091063 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "3537c3df-cdbc-4e1c-aee1-f2d942207a5a" (UID: "3537c3df-cdbc-4e1c-aee1-f2d942207a5a"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.162639 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-run-ovn-kubernetes\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.162723 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz2v9\" (UniqueName: \"kubernetes.io/projected/7e7a5312-d410-49ed-9ca9-fbd996009ccf-kube-api-access-wz2v9\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.162756 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-cni-netd\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.162786 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-log-socket\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.162812 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7e7a5312-d410-49ed-9ca9-fbd996009ccf-ovn-node-metrics-cert\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.162839 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7e7a5312-d410-49ed-9ca9-fbd996009ccf-ovnkube-script-lib\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.162885 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-run-netns\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.162914 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-etc-openvswitch\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.162953 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-slash\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.162984 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-cni-bin\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.163013 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-var-lib-openvswitch\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.163036 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7e7a5312-d410-49ed-9ca9-fbd996009ccf-env-overrides\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.163063 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.163088 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7e7a5312-d410-49ed-9ca9-fbd996009ccf-ovnkube-config\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.163159 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-kubelet\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.163291 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-run-ovn\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.163362 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-run-openvswitch\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.163400 4946 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-systemd-units\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.163429 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-node-log\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.163458 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-run-systemd\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.163533 4946 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.163553 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8fd42\" (UniqueName: \"kubernetes.io/projected/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-kube-api-access-8fd42\") on node \"crc\" DevicePath \"\"" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.163568 4946 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3537c3df-cdbc-4e1c-aee1-f2d942207a5a-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264391 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7e7a5312-d410-49ed-9ca9-fbd996009ccf-ovn-node-metrics-cert\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264464 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7e7a5312-d410-49ed-9ca9-fbd996009ccf-ovnkube-script-lib\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264514 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-run-netns\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264543 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-etc-openvswitch\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 
15:14:29.264577 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-slash\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264604 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-cni-bin\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264626 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-var-lib-openvswitch\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264648 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7e7a5312-d410-49ed-9ca9-fbd996009ccf-env-overrides\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264690 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264710 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7e7a5312-d410-49ed-9ca9-fbd996009ccf-ovnkube-config\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264732 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-kubelet\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264757 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-run-ovn\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264779 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-run-openvswitch\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264801 4946 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-systemd-units\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264831 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-node-log\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264860 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-run-systemd\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264891 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-run-ovn-kubernetes\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264915 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz2v9\" (UniqueName: \"kubernetes.io/projected/7e7a5312-d410-49ed-9ca9-fbd996009ccf-kube-api-access-wz2v9\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264947 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-cni-netd\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.264968 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-log-socket\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.265053 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-log-socket\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.265355 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-slash\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.265379 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: 
\"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-node-log\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.265469 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-kubelet\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.265513 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-run-ovn\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.265563 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-run-openvswitch\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.265590 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-systemd-units\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.265619 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-run-ovn-kubernetes\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.265665 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-run-systemd\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.265693 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-cni-netd\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.265721 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-var-lib-openvswitch\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.265748 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-cni-bin\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.266026 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.266043 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7e7a5312-d410-49ed-9ca9-fbd996009ccf-ovnkube-config\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.266072 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-host-run-netns\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.266095 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7e7a5312-d410-49ed-9ca9-fbd996009ccf-etc-openvswitch\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.266217 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7e7a5312-d410-49ed-9ca9-fbd996009ccf-ovnkube-script-lib\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.266288 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7e7a5312-d410-49ed-9ca9-fbd996009ccf-env-overrides\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.269347 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7e7a5312-d410-49ed-9ca9-fbd996009ccf-ovn-node-metrics-cert\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.292673 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz2v9\" (UniqueName: \"kubernetes.io/projected/7e7a5312-d410-49ed-9ca9-fbd996009ccf-kube-api-access-wz2v9\") pod \"ovnkube-node-s7shz\" (UID: \"7e7a5312-d410-49ed-9ca9-fbd996009ccf\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.357182 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.815649 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-acl-logging/1.log" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.818855 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-w598m_3537c3df-cdbc-4e1c-aee1-f2d942207a5a/ovn-controller/0.log" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.821217 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" event={"ID":"3537c3df-cdbc-4e1c-aee1-f2d942207a5a","Type":"ContainerDied","Data":"97c226b566a4a0e837159d00d000b318d958a153901272fab7a39757e15e10ec"} Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.821274 4946 scope.go:117] "RemoveContainer" containerID="7a50aab186604f5678ccff903749ddd74758ee16d496f773128aeb0af53f61bd" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.821603 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-w598m" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.829384 4946 generic.go:334] "Generic (PLEG): container finished" podID="7e7a5312-d410-49ed-9ca9-fbd996009ccf" containerID="bfe435a1a6fd1552960dcf4600998194ef94bc8846b50ac16b7a068fb0abcd27" exitCode=0 Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.829505 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" event={"ID":"7e7a5312-d410-49ed-9ca9-fbd996009ccf","Type":"ContainerDied","Data":"bfe435a1a6fd1552960dcf4600998194ef94bc8846b50ac16b7a068fb0abcd27"} Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.829535 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" event={"ID":"7e7a5312-d410-49ed-9ca9-fbd996009ccf","Type":"ContainerStarted","Data":"df6763b1e5ad574b9dc785edfe126c85b685cbc8b124b08cae7dba24787b260c"} Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.839480 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-fjmh5_f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09/kube-multus/2.log" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.855410 4946 scope.go:117] "RemoveContainer" containerID="8465d1e393f673c05d468ccb3f7573bda1b6a3384ad3efbd5a522f892786732b" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.886074 4946 scope.go:117] "RemoveContainer" containerID="94352a7e5e5c24ba6f945cb96fb1960e412b3a9c8bf4e2c404768c91fbff9631" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.906464 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-w598m"] Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.913752 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-w598m"] Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.914588 4946 scope.go:117] "RemoveContainer" containerID="ef72cbf807cd14965196f219638199fd271710bea7e1444776675f7ab8ad1531" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.951626 4946 scope.go:117] "RemoveContainer" containerID="a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.983686 4946 scope.go:117] "RemoveContainer" 
containerID="691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07" Dec 04 15:14:29 crc kubenswrapper[4946]: I1204 15:14:29.983893 4946 scope.go:117] "RemoveContainer" containerID="48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359" Dec 04 15:14:30 crc kubenswrapper[4946]: I1204 15:14:30.013907 4946 scope.go:117] "RemoveContainer" containerID="e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8" Dec 04 15:14:30 crc kubenswrapper[4946]: I1204 15:14:30.036374 4946 scope.go:117] "RemoveContainer" containerID="64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7" Dec 04 15:14:30 crc kubenswrapper[4946]: I1204 15:14:30.059537 4946 scope.go:117] "RemoveContainer" containerID="48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359" Dec 04 15:14:30 crc kubenswrapper[4946]: E1204 15:14:30.061268 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\": container with ID starting with 48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359 not found: ID does not exist" containerID="48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359" Dec 04 15:14:30 crc kubenswrapper[4946]: I1204 15:14:30.061344 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359"} err="failed to get container status \"48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\": rpc error: code = NotFound desc = could not find container \"48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359\": container with ID starting with 48708a6a1bda32cac829c0b87598c14895d6aa83c9f0d0ac607bbd791f239359 not found: ID does not exist" Dec 04 15:14:30 crc kubenswrapper[4946]: I1204 15:14:30.061376 4946 scope.go:117] "RemoveContainer" containerID="64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7" Dec 04 15:14:30 crc kubenswrapper[4946]: E1204 15:14:30.082379 4946 log.go:32] "RemoveContainer from runtime service failed" err="rpc error: code = Unknown desc = failed to delete container k8s_kubecfg-setup_ovnkube-node-w598m_openshift-ovn-kubernetes_3537c3df-cdbc-4e1c-aee1-f2d942207a5a_0 in pod sandbox 97c226b566a4a0e837159d00d000b318d958a153901272fab7a39757e15e10ec from index: no such id: '64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7'" containerID="64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7" Dec 04 15:14:30 crc kubenswrapper[4946]: E1204 15:14:30.082460 4946 kuberuntime_gc.go:150] "Failed to remove container" err="rpc error: code = Unknown desc = failed to delete container k8s_kubecfg-setup_ovnkube-node-w598m_openshift-ovn-kubernetes_3537c3df-cdbc-4e1c-aee1-f2d942207a5a_0 in pod sandbox 97c226b566a4a0e837159d00d000b318d958a153901272fab7a39757e15e10ec from index: no such id: '64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7'" containerID="64da9d7ea5b13b71eebf00d622bfaa609553e967a44de7366930fc97eb4b89d7" Dec 04 15:14:30 crc kubenswrapper[4946]: I1204 15:14:30.082492 4946 scope.go:117] "RemoveContainer" containerID="a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d" Dec 04 15:14:30 crc kubenswrapper[4946]: E1204 15:14:30.083624 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\": 
container with ID starting with a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d not found: ID does not exist" containerID="a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d" Dec 04 15:14:30 crc kubenswrapper[4946]: E1204 15:14:30.083658 4946 kuberuntime_gc.go:150] "Failed to remove container" err="failed to get container status \"a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\": rpc error: code = NotFound desc = could not find container \"a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d\": container with ID starting with a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d not found: ID does not exist" containerID="a0ddcd4c909278d1af62309d531ce25e5c2d8d29a527d4eabc1635bd3feb4c5d" Dec 04 15:14:30 crc kubenswrapper[4946]: I1204 15:14:30.083679 4946 scope.go:117] "RemoveContainer" containerID="e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8" Dec 04 15:14:30 crc kubenswrapper[4946]: E1204 15:14:30.086606 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\": container with ID starting with e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8 not found: ID does not exist" containerID="e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8" Dec 04 15:14:30 crc kubenswrapper[4946]: E1204 15:14:30.086682 4946 kuberuntime_gc.go:150] "Failed to remove container" err="failed to get container status \"e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\": rpc error: code = NotFound desc = could not find container \"e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8\": container with ID starting with e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8 not found: ID does not exist" containerID="e4bfd316af31fdc1625482bd8957d4c30c9f7015bd6517c7cdfc9a016948b3c8" Dec 04 15:14:30 crc kubenswrapper[4946]: I1204 15:14:30.086735 4946 scope.go:117] "RemoveContainer" containerID="691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07" Dec 04 15:14:30 crc kubenswrapper[4946]: E1204 15:14:30.087501 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\": container with ID starting with 691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07 not found: ID does not exist" containerID="691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07" Dec 04 15:14:30 crc kubenswrapper[4946]: E1204 15:14:30.087534 4946 kuberuntime_gc.go:150] "Failed to remove container" err="failed to get container status \"691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\": rpc error: code = NotFound desc = could not find container \"691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07\": container with ID starting with 691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07 not found: ID does not exist" containerID="691c90607136fcee84ffa185a5872f730578b60b4cc341e9bf154eac97a5ed07" Dec 04 15:14:30 crc kubenswrapper[4946]: I1204 15:14:30.848892 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" event={"ID":"7e7a5312-d410-49ed-9ca9-fbd996009ccf","Type":"ContainerStarted","Data":"7a32e5f22d7ef3449ab039e90a21a5185790853e669acd54b048345d818b22f6"} Dec 04 15:14:30 crc kubenswrapper[4946]: I1204 
15:14:30.849494 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" event={"ID":"7e7a5312-d410-49ed-9ca9-fbd996009ccf","Type":"ContainerStarted","Data":"c8d048eea914370d5ab7ba3548fd92a15bda336a326958a9d6c1241673129f4e"} Dec 04 15:14:30 crc kubenswrapper[4946]: I1204 15:14:30.849522 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" event={"ID":"7e7a5312-d410-49ed-9ca9-fbd996009ccf","Type":"ContainerStarted","Data":"3b697f3699afa628a09a6c987077a86d98341d05f3a545e43a67117a781263d0"} Dec 04 15:14:31 crc kubenswrapper[4946]: I1204 15:14:31.459740 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3537c3df-cdbc-4e1c-aee1-f2d942207a5a" path="/var/lib/kubelet/pods/3537c3df-cdbc-4e1c-aee1-f2d942207a5a/volumes" Dec 04 15:14:31 crc kubenswrapper[4946]: I1204 15:14:31.859542 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" event={"ID":"7e7a5312-d410-49ed-9ca9-fbd996009ccf","Type":"ContainerStarted","Data":"5cebc040e34e5e9e4609e3557b085e97d2a9356e19af52cce926c5062d1dccf9"} Dec 04 15:14:31 crc kubenswrapper[4946]: I1204 15:14:31.859605 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" event={"ID":"7e7a5312-d410-49ed-9ca9-fbd996009ccf","Type":"ContainerStarted","Data":"338c78d5ffe4b3ca69a98cdc6c85f17d7fb04b5fc369c874ece54ef06084bd1f"} Dec 04 15:14:31 crc kubenswrapper[4946]: I1204 15:14:31.859619 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" event={"ID":"7e7a5312-d410-49ed-9ca9-fbd996009ccf","Type":"ContainerStarted","Data":"09e30a7a0b039c5f6e36c2c082e66a2a3b59d8b17a714efdb4f95ab16119cc5b"} Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.126362 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl"] Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.127354 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.131777 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.134910 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-nfj4r" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.136286 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.211975 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr"] Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.213036 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.218852 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5"] Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.219723 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.220395 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-229fx" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.220941 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.332155 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/695e348c-7a92-4a69-b104-1f37361d5c49-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5\" (UID: \"695e348c-7a92-4a69-b104-1f37361d5c49\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.332295 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/695e348c-7a92-4a69-b104-1f37361d5c49-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5\" (UID: \"695e348c-7a92-4a69-b104-1f37361d5c49\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.332330 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c42w8\" (UniqueName: \"kubernetes.io/projected/154e09f2-667a-45bf-abdb-fc3e1f0f0ba6-kube-api-access-c42w8\") pod \"obo-prometheus-operator-668cf9dfbb-tjsxl\" (UID: \"154e09f2-667a-45bf-abdb-fc3e1f0f0ba6\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.332361 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr\" (UID: \"77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.332405 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr\" (UID: \"77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.369893 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-bmpm5"] Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.370866 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.374339 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.374399 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-fqpn4" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.433360 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/695e348c-7a92-4a69-b104-1f37361d5c49-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5\" (UID: \"695e348c-7a92-4a69-b104-1f37361d5c49\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.433416 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c42w8\" (UniqueName: \"kubernetes.io/projected/154e09f2-667a-45bf-abdb-fc3e1f0f0ba6-kube-api-access-c42w8\") pod \"obo-prometheus-operator-668cf9dfbb-tjsxl\" (UID: \"154e09f2-667a-45bf-abdb-fc3e1f0f0ba6\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.433445 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr\" (UID: \"77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.433481 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr\" (UID: \"77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.433514 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/695e348c-7a92-4a69-b104-1f37361d5c49-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5\" (UID: \"695e348c-7a92-4a69-b104-1f37361d5c49\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.442999 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr\" (UID: \"77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.443027 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/695e348c-7a92-4a69-b104-1f37361d5c49-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5\" (UID: \"695e348c-7a92-4a69-b104-1f37361d5c49\") " 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.442999 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr\" (UID: \"77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.445100 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/695e348c-7a92-4a69-b104-1f37361d5c49-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5\" (UID: \"695e348c-7a92-4a69-b104-1f37361d5c49\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.455258 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c42w8\" (UniqueName: \"kubernetes.io/projected/154e09f2-667a-45bf-abdb-fc3e1f0f0ba6-kube-api-access-c42w8\") pod \"obo-prometheus-operator-668cf9dfbb-tjsxl\" (UID: \"154e09f2-667a-45bf-abdb-fc3e1f0f0ba6\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.534895 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7vj6\" (UniqueName: \"kubernetes.io/projected/adb38877-f50c-48aa-a3ca-951150033479-kube-api-access-x7vj6\") pod \"observability-operator-d8bb48f5d-bmpm5\" (UID: \"adb38877-f50c-48aa-a3ca-951150033479\") " pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.535038 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/adb38877-f50c-48aa-a3ca-951150033479-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-bmpm5\" (UID: \"adb38877-f50c-48aa-a3ca-951150033479\") " pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.537313 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.552840 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:14:32 crc kubenswrapper[4946]: E1204 15:14:32.597107 4946 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_openshift-operators_77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11_0(1424896bd8daeacad8af5f851642e9a963e0509dd96b4e4f79072a6e161c5f58): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 04 15:14:32 crc kubenswrapper[4946]: E1204 15:14:32.597687 4946 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_openshift-operators_77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11_0(1424896bd8daeacad8af5f851642e9a963e0509dd96b4e4f79072a6e161c5f58): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:14:32 crc kubenswrapper[4946]: E1204 15:14:32.597720 4946 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_openshift-operators_77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11_0(1424896bd8daeacad8af5f851642e9a963e0509dd96b4e4f79072a6e161c5f58): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:14:32 crc kubenswrapper[4946]: E1204 15:14:32.597784 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_openshift-operators(77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_openshift-operators(77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_openshift-operators_77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11_0(1424896bd8daeacad8af5f851642e9a963e0509dd96b4e4f79072a6e161c5f58): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" podUID="77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.602700 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-rf6pw"] Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.603739 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.614634 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-mkpr4" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.637154 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7vj6\" (UniqueName: \"kubernetes.io/projected/adb38877-f50c-48aa-a3ca-951150033479-kube-api-access-x7vj6\") pod \"observability-operator-d8bb48f5d-bmpm5\" (UID: \"adb38877-f50c-48aa-a3ca-951150033479\") " pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.637240 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/adb38877-f50c-48aa-a3ca-951150033479-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-bmpm5\" (UID: \"adb38877-f50c-48aa-a3ca-951150033479\") " pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:14:32 crc kubenswrapper[4946]: E1204 15:14:32.640414 4946 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_openshift-operators_695e348c-7a92-4a69-b104-1f37361d5c49_0(54ad36db17b25c32d95bf99ec269f54cbd35886f44bdab7a4d302d71b3ad0bfe): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 15:14:32 crc kubenswrapper[4946]: E1204 15:14:32.640484 4946 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_openshift-operators_695e348c-7a92-4a69-b104-1f37361d5c49_0(54ad36db17b25c32d95bf99ec269f54cbd35886f44bdab7a4d302d71b3ad0bfe): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:14:32 crc kubenswrapper[4946]: E1204 15:14:32.640513 4946 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_openshift-operators_695e348c-7a92-4a69-b104-1f37361d5c49_0(54ad36db17b25c32d95bf99ec269f54cbd35886f44bdab7a4d302d71b3ad0bfe): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:14:32 crc kubenswrapper[4946]: E1204 15:14:32.640571 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_openshift-operators(695e348c-7a92-4a69-b104-1f37361d5c49)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_openshift-operators(695e348c-7a92-4a69-b104-1f37361d5c49)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_openshift-operators_695e348c-7a92-4a69-b104-1f37361d5c49_0(54ad36db17b25c32d95bf99ec269f54cbd35886f44bdab7a4d302d71b3ad0bfe): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" podUID="695e348c-7a92-4a69-b104-1f37361d5c49" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.646224 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/adb38877-f50c-48aa-a3ca-951150033479-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-bmpm5\" (UID: \"adb38877-f50c-48aa-a3ca-951150033479\") " pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.659904 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7vj6\" (UniqueName: \"kubernetes.io/projected/adb38877-f50c-48aa-a3ca-951150033479-kube-api-access-x7vj6\") pod \"observability-operator-d8bb48f5d-bmpm5\" (UID: \"adb38877-f50c-48aa-a3ca-951150033479\") " pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.685965 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:14:32 crc kubenswrapper[4946]: E1204 15:14:32.713405 4946 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-bmpm5_openshift-operators_adb38877-f50c-48aa-a3ca-951150033479_0(696d6143294e9bb654b35aa9df22687e17e07c58f9aff30ed4f81176d5ef989a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 15:14:32 crc kubenswrapper[4946]: E1204 15:14:32.713509 4946 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-bmpm5_openshift-operators_adb38877-f50c-48aa-a3ca-951150033479_0(696d6143294e9bb654b35aa9df22687e17e07c58f9aff30ed4f81176d5ef989a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:14:32 crc kubenswrapper[4946]: E1204 15:14:32.713537 4946 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-bmpm5_openshift-operators_adb38877-f50c-48aa-a3ca-951150033479_0(696d6143294e9bb654b35aa9df22687e17e07c58f9aff30ed4f81176d5ef989a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:14:32 crc kubenswrapper[4946]: E1204 15:14:32.713604 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-bmpm5_openshift-operators(adb38877-f50c-48aa-a3ca-951150033479)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-bmpm5_openshift-operators(adb38877-f50c-48aa-a3ca-951150033479)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-bmpm5_openshift-operators_adb38877-f50c-48aa-a3ca-951150033479_0(696d6143294e9bb654b35aa9df22687e17e07c58f9aff30ed4f81176d5ef989a): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" podUID="adb38877-f50c-48aa-a3ca-951150033479" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.738599 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/c9624505-3974-47fb-93d7-1a2ff73b29c7-openshift-service-ca\") pod \"perses-operator-5446b9c989-rf6pw\" (UID: \"c9624505-3974-47fb-93d7-1a2ff73b29c7\") " pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.738661 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6f4q\" (UniqueName: \"kubernetes.io/projected/c9624505-3974-47fb-93d7-1a2ff73b29c7-kube-api-access-g6f4q\") pod \"perses-operator-5446b9c989-rf6pw\" (UID: \"c9624505-3974-47fb-93d7-1a2ff73b29c7\") " pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.746551 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" Dec 04 15:14:32 crc kubenswrapper[4946]: E1204 15:14:32.771884 4946 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-tjsxl_openshift-operators_154e09f2-667a-45bf-abdb-fc3e1f0f0ba6_0(dcde7761b5882be52a0a588a0bc5cfa71543605ae2e9a93df6c7333d523d716b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 15:14:32 crc kubenswrapper[4946]: E1204 15:14:32.771978 4946 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-tjsxl_openshift-operators_154e09f2-667a-45bf-abdb-fc3e1f0f0ba6_0(dcde7761b5882be52a0a588a0bc5cfa71543605ae2e9a93df6c7333d523d716b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" Dec 04 15:14:32 crc kubenswrapper[4946]: E1204 15:14:32.772000 4946 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-tjsxl_openshift-operators_154e09f2-667a-45bf-abdb-fc3e1f0f0ba6_0(dcde7761b5882be52a0a588a0bc5cfa71543605ae2e9a93df6c7333d523d716b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" Dec 04 15:14:32 crc kubenswrapper[4946]: E1204 15:14:32.772064 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-tjsxl_openshift-operators(154e09f2-667a-45bf-abdb-fc3e1f0f0ba6)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-tjsxl_openshift-operators(154e09f2-667a-45bf-abdb-fc3e1f0f0ba6)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-tjsxl_openshift-operators_154e09f2-667a-45bf-abdb-fc3e1f0f0ba6_0(dcde7761b5882be52a0a588a0bc5cfa71543605ae2e9a93df6c7333d523d716b): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" podUID="154e09f2-667a-45bf-abdb-fc3e1f0f0ba6" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.840915 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/c9624505-3974-47fb-93d7-1a2ff73b29c7-openshift-service-ca\") pod \"perses-operator-5446b9c989-rf6pw\" (UID: \"c9624505-3974-47fb-93d7-1a2ff73b29c7\") " pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.841012 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6f4q\" (UniqueName: \"kubernetes.io/projected/c9624505-3974-47fb-93d7-1a2ff73b29c7-kube-api-access-g6f4q\") pod \"perses-operator-5446b9c989-rf6pw\" (UID: \"c9624505-3974-47fb-93d7-1a2ff73b29c7\") " pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.842486 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/c9624505-3974-47fb-93d7-1a2ff73b29c7-openshift-service-ca\") pod \"perses-operator-5446b9c989-rf6pw\" (UID: \"c9624505-3974-47fb-93d7-1a2ff73b29c7\") " pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.863051 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6f4q\" (UniqueName: \"kubernetes.io/projected/c9624505-3974-47fb-93d7-1a2ff73b29c7-kube-api-access-g6f4q\") pod \"perses-operator-5446b9c989-rf6pw\" (UID: \"c9624505-3974-47fb-93d7-1a2ff73b29c7\") " pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:14:32 crc kubenswrapper[4946]: I1204 15:14:32.992545 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:14:33 crc kubenswrapper[4946]: E1204 15:14:33.015664 4946 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-rf6pw_openshift-operators_c9624505-3974-47fb-93d7-1a2ff73b29c7_0(749f2a712e9fa47a21a2a2bd5718e10f2640a655c18bca0ec204fe070d989012): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 15:14:33 crc kubenswrapper[4946]: E1204 15:14:33.015747 4946 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-rf6pw_openshift-operators_c9624505-3974-47fb-93d7-1a2ff73b29c7_0(749f2a712e9fa47a21a2a2bd5718e10f2640a655c18bca0ec204fe070d989012): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:14:33 crc kubenswrapper[4946]: E1204 15:14:33.015789 4946 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-rf6pw_openshift-operators_c9624505-3974-47fb-93d7-1a2ff73b29c7_0(749f2a712e9fa47a21a2a2bd5718e10f2640a655c18bca0ec204fe070d989012): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:14:33 crc kubenswrapper[4946]: E1204 15:14:33.015835 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-rf6pw_openshift-operators(c9624505-3974-47fb-93d7-1a2ff73b29c7)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-rf6pw_openshift-operators(c9624505-3974-47fb-93d7-1a2ff73b29c7)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-rf6pw_openshift-operators_c9624505-3974-47fb-93d7-1a2ff73b29c7_0(749f2a712e9fa47a21a2a2bd5718e10f2640a655c18bca0ec204fe070d989012): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" podUID="c9624505-3974-47fb-93d7-1a2ff73b29c7" Dec 04 15:14:33 crc kubenswrapper[4946]: I1204 15:14:33.894426 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" event={"ID":"7e7a5312-d410-49ed-9ca9-fbd996009ccf","Type":"ContainerStarted","Data":"439775c4dffd151c7d68ea30fa1096c02db15f898055c10250296d9dc0b78dfa"} Dec 04 15:14:36 crc kubenswrapper[4946]: I1204 15:14:36.918962 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" event={"ID":"7e7a5312-d410-49ed-9ca9-fbd996009ccf","Type":"ContainerStarted","Data":"52929fc56ab99985874c4e37a83460cc91f15e59da1058b372ee62ceca4d0c8e"} Dec 04 15:14:36 crc kubenswrapper[4946]: I1204 15:14:36.919541 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:36 crc kubenswrapper[4946]: I1204 15:14:36.919558 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:36 crc kubenswrapper[4946]: I1204 15:14:36.919570 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:37 crc kubenswrapper[4946]: I1204 15:14:37.028350 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" podStartSLOduration=8.028328089 podStartE2EDuration="8.028328089s" podCreationTimestamp="2025-12-04 15:14:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:14:37.027537168 +0000 UTC m=+727.913580809" watchObservedRunningTime="2025-12-04 15:14:37.028328089 +0000 UTC m=+727.914371720" Dec 04 15:14:37 crc kubenswrapper[4946]: I1204 15:14:37.144275 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:37 crc kubenswrapper[4946]: I1204 15:14:37.186061 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:14:39 crc kubenswrapper[4946]: I1204 15:14:39.358353 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5"] Dec 04 15:14:39 crc kubenswrapper[4946]: I1204 15:14:39.358985 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:14:39 crc kubenswrapper[4946]: I1204 15:14:39.359635 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:14:39 crc kubenswrapper[4946]: I1204 15:14:39.372924 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-bmpm5"] Dec 04 15:14:39 crc kubenswrapper[4946]: I1204 15:14:39.373359 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:14:39 crc kubenswrapper[4946]: I1204 15:14:39.373925 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:14:39 crc kubenswrapper[4946]: I1204 15:14:39.457450 4946 scope.go:117] "RemoveContainer" containerID="04157dee9b66b9c96469e5f0cdf517501a2e2855eb90759879ca3ded4097554c" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.458100 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-fjmh5_openshift-multus(f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09)\"" pod="openshift-multus/multus-fjmh5" podUID="f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09" Dec 04 15:14:39 crc kubenswrapper[4946]: I1204 15:14:39.460345 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr"] Dec 04 15:14:39 crc kubenswrapper[4946]: I1204 15:14:39.460458 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:14:39 crc kubenswrapper[4946]: I1204 15:14:39.460795 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.505714 4946 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-bmpm5_openshift-operators_adb38877-f50c-48aa-a3ca-951150033479_0(121d52c1ee0a2708ea8335570ef8a861ec1de682c5ad58687e66a2978045f50e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.505797 4946 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-bmpm5_openshift-operators_adb38877-f50c-48aa-a3ca-951150033479_0(121d52c1ee0a2708ea8335570ef8a861ec1de682c5ad58687e66a2978045f50e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.505823 4946 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-bmpm5_openshift-operators_adb38877-f50c-48aa-a3ca-951150033479_0(121d52c1ee0a2708ea8335570ef8a861ec1de682c5ad58687e66a2978045f50e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.505895 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-bmpm5_openshift-operators(adb38877-f50c-48aa-a3ca-951150033479)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-bmpm5_openshift-operators(adb38877-f50c-48aa-a3ca-951150033479)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-bmpm5_openshift-operators_adb38877-f50c-48aa-a3ca-951150033479_0(121d52c1ee0a2708ea8335570ef8a861ec1de682c5ad58687e66a2978045f50e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" podUID="adb38877-f50c-48aa-a3ca-951150033479" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.518594 4946 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_openshift-operators_695e348c-7a92-4a69-b104-1f37361d5c49_0(625034d37e3652ce36c4d39df44973cba4a34293e552b62d266a6ac2336224da): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.518692 4946 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_openshift-operators_695e348c-7a92-4a69-b104-1f37361d5c49_0(625034d37e3652ce36c4d39df44973cba4a34293e552b62d266a6ac2336224da): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.518721 4946 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_openshift-operators_695e348c-7a92-4a69-b104-1f37361d5c49_0(625034d37e3652ce36c4d39df44973cba4a34293e552b62d266a6ac2336224da): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.518787 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_openshift-operators(695e348c-7a92-4a69-b104-1f37361d5c49)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_openshift-operators(695e348c-7a92-4a69-b104-1f37361d5c49)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_openshift-operators_695e348c-7a92-4a69-b104-1f37361d5c49_0(625034d37e3652ce36c4d39df44973cba4a34293e552b62d266a6ac2336224da): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" podUID="695e348c-7a92-4a69-b104-1f37361d5c49" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.561561 4946 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_openshift-operators_77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11_0(aaa81b4142c4312cab0a17a14eb139e29bf35aaa54c3e0453540655178daf60e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.561635 4946 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_openshift-operators_77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11_0(aaa81b4142c4312cab0a17a14eb139e29bf35aaa54c3e0453540655178daf60e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.561659 4946 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_openshift-operators_77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11_0(aaa81b4142c4312cab0a17a14eb139e29bf35aaa54c3e0453540655178daf60e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.561710 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_openshift-operators(77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_openshift-operators(77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_openshift-operators_77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11_0(aaa81b4142c4312cab0a17a14eb139e29bf35aaa54c3e0453540655178daf60e): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" podUID="77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11" Dec 04 15:14:39 crc kubenswrapper[4946]: I1204 15:14:39.597231 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-rf6pw"] Dec 04 15:14:39 crc kubenswrapper[4946]: I1204 15:14:39.597399 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:14:39 crc kubenswrapper[4946]: I1204 15:14:39.598085 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:14:39 crc kubenswrapper[4946]: I1204 15:14:39.622844 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl"] Dec 04 15:14:39 crc kubenswrapper[4946]: I1204 15:14:39.623637 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" Dec 04 15:14:39 crc kubenswrapper[4946]: I1204 15:14:39.624238 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.709699 4946 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-rf6pw_openshift-operators_c9624505-3974-47fb-93d7-1a2ff73b29c7_0(4494c52032125d8e8535b98241f4962c515e27144866f0c9b0f2c1fc6f45902c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.709934 4946 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-rf6pw_openshift-operators_c9624505-3974-47fb-93d7-1a2ff73b29c7_0(4494c52032125d8e8535b98241f4962c515e27144866f0c9b0f2c1fc6f45902c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.710021 4946 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-rf6pw_openshift-operators_c9624505-3974-47fb-93d7-1a2ff73b29c7_0(4494c52032125d8e8535b98241f4962c515e27144866f0c9b0f2c1fc6f45902c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.710128 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-rf6pw_openshift-operators(c9624505-3974-47fb-93d7-1a2ff73b29c7)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-rf6pw_openshift-operators(c9624505-3974-47fb-93d7-1a2ff73b29c7)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-rf6pw_openshift-operators_c9624505-3974-47fb-93d7-1a2ff73b29c7_0(4494c52032125d8e8535b98241f4962c515e27144866f0c9b0f2c1fc6f45902c): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" podUID="c9624505-3974-47fb-93d7-1a2ff73b29c7" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.725534 4946 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-tjsxl_openshift-operators_154e09f2-667a-45bf-abdb-fc3e1f0f0ba6_0(c2eceb696897aeb0a9a95f7655c421ed50cb3149e7310cdaa700c9351733e2c9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.725632 4946 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-tjsxl_openshift-operators_154e09f2-667a-45bf-abdb-fc3e1f0f0ba6_0(c2eceb696897aeb0a9a95f7655c421ed50cb3149e7310cdaa700c9351733e2c9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.725656 4946 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-tjsxl_openshift-operators_154e09f2-667a-45bf-abdb-fc3e1f0f0ba6_0(c2eceb696897aeb0a9a95f7655c421ed50cb3149e7310cdaa700c9351733e2c9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" Dec 04 15:14:39 crc kubenswrapper[4946]: E1204 15:14:39.725701 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-tjsxl_openshift-operators(154e09f2-667a-45bf-abdb-fc3e1f0f0ba6)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-tjsxl_openshift-operators(154e09f2-667a-45bf-abdb-fc3e1f0f0ba6)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-tjsxl_openshift-operators_154e09f2-667a-45bf-abdb-fc3e1f0f0ba6_0(c2eceb696897aeb0a9a95f7655c421ed50cb3149e7310cdaa700c9351733e2c9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" podUID="154e09f2-667a-45bf-abdb-fc3e1f0f0ba6" Dec 04 15:14:50 crc kubenswrapper[4946]: I1204 15:14:50.452162 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:14:50 crc kubenswrapper[4946]: I1204 15:14:50.454516 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:14:50 crc kubenswrapper[4946]: E1204 15:14:50.506472 4946 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_openshift-operators_695e348c-7a92-4a69-b104-1f37361d5c49_0(9115ce4b932a458fa38e16745c97b164e4daf5457002d6cbc7afb89ae09e16a6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 04 15:14:50 crc kubenswrapper[4946]: E1204 15:14:50.506566 4946 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_openshift-operators_695e348c-7a92-4a69-b104-1f37361d5c49_0(9115ce4b932a458fa38e16745c97b164e4daf5457002d6cbc7afb89ae09e16a6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:14:50 crc kubenswrapper[4946]: E1204 15:14:50.506589 4946 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_openshift-operators_695e348c-7a92-4a69-b104-1f37361d5c49_0(9115ce4b932a458fa38e16745c97b164e4daf5457002d6cbc7afb89ae09e16a6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:14:50 crc kubenswrapper[4946]: E1204 15:14:50.506646 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_openshift-operators(695e348c-7a92-4a69-b104-1f37361d5c49)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_openshift-operators(695e348c-7a92-4a69-b104-1f37361d5c49)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_openshift-operators_695e348c-7a92-4a69-b104-1f37361d5c49_0(9115ce4b932a458fa38e16745c97b164e4daf5457002d6cbc7afb89ae09e16a6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" podUID="695e348c-7a92-4a69-b104-1f37361d5c49" Dec 04 15:14:51 crc kubenswrapper[4946]: I1204 15:14:51.452823 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:14:51 crc kubenswrapper[4946]: I1204 15:14:51.453626 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:14:51 crc kubenswrapper[4946]: E1204 15:14:51.481168 4946 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-bmpm5_openshift-operators_adb38877-f50c-48aa-a3ca-951150033479_0(e60b45ca37c8b341f233076154c9085cd6fb38459bb561fda31463013129a498): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 15:14:51 crc kubenswrapper[4946]: E1204 15:14:51.481266 4946 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-bmpm5_openshift-operators_adb38877-f50c-48aa-a3ca-951150033479_0(e60b45ca37c8b341f233076154c9085cd6fb38459bb561fda31463013129a498): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:14:51 crc kubenswrapper[4946]: E1204 15:14:51.481297 4946 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-bmpm5_openshift-operators_adb38877-f50c-48aa-a3ca-951150033479_0(e60b45ca37c8b341f233076154c9085cd6fb38459bb561fda31463013129a498): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:14:51 crc kubenswrapper[4946]: E1204 15:14:51.481361 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-bmpm5_openshift-operators(adb38877-f50c-48aa-a3ca-951150033479)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-bmpm5_openshift-operators(adb38877-f50c-48aa-a3ca-951150033479)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-bmpm5_openshift-operators_adb38877-f50c-48aa-a3ca-951150033479_0(e60b45ca37c8b341f233076154c9085cd6fb38459bb561fda31463013129a498): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" podUID="adb38877-f50c-48aa-a3ca-951150033479" Dec 04 15:14:52 crc kubenswrapper[4946]: I1204 15:14:52.452542 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" Dec 04 15:14:52 crc kubenswrapper[4946]: I1204 15:14:52.453280 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" Dec 04 15:14:52 crc kubenswrapper[4946]: E1204 15:14:52.483050 4946 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-tjsxl_openshift-operators_154e09f2-667a-45bf-abdb-fc3e1f0f0ba6_0(cb79bb68274ddfb9e1142fd6347e2a01e7df730f627d0264151b66397647f9cd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 15:14:52 crc kubenswrapper[4946]: E1204 15:14:52.483465 4946 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-tjsxl_openshift-operators_154e09f2-667a-45bf-abdb-fc3e1f0f0ba6_0(cb79bb68274ddfb9e1142fd6347e2a01e7df730f627d0264151b66397647f9cd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" Dec 04 15:14:52 crc kubenswrapper[4946]: E1204 15:14:52.483494 4946 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-tjsxl_openshift-operators_154e09f2-667a-45bf-abdb-fc3e1f0f0ba6_0(cb79bb68274ddfb9e1142fd6347e2a01e7df730f627d0264151b66397647f9cd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" Dec 04 15:14:52 crc kubenswrapper[4946]: E1204 15:14:52.483554 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-tjsxl_openshift-operators(154e09f2-667a-45bf-abdb-fc3e1f0f0ba6)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-tjsxl_openshift-operators(154e09f2-667a-45bf-abdb-fc3e1f0f0ba6)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-tjsxl_openshift-operators_154e09f2-667a-45bf-abdb-fc3e1f0f0ba6_0(cb79bb68274ddfb9e1142fd6347e2a01e7df730f627d0264151b66397647f9cd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" podUID="154e09f2-667a-45bf-abdb-fc3e1f0f0ba6" Dec 04 15:14:53 crc kubenswrapper[4946]: I1204 15:14:53.452588 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:14:53 crc kubenswrapper[4946]: I1204 15:14:53.453016 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:14:53 crc kubenswrapper[4946]: E1204 15:14:53.475752 4946 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_openshift-operators_77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11_0(ea222dc2037028942ded36e862f35128e66a5f7f099b2a82702d2e88101cf772): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 15:14:53 crc kubenswrapper[4946]: E1204 15:14:53.475832 4946 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_openshift-operators_77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11_0(ea222dc2037028942ded36e862f35128e66a5f7f099b2a82702d2e88101cf772): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:14:53 crc kubenswrapper[4946]: E1204 15:14:53.475857 4946 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_openshift-operators_77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11_0(ea222dc2037028942ded36e862f35128e66a5f7f099b2a82702d2e88101cf772): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:14:53 crc kubenswrapper[4946]: E1204 15:14:53.475913 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_openshift-operators(77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_openshift-operators(77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_openshift-operators_77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11_0(ea222dc2037028942ded36e862f35128e66a5f7f099b2a82702d2e88101cf772): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" podUID="77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11" Dec 04 15:14:54 crc kubenswrapper[4946]: I1204 15:14:54.453334 4946 scope.go:117] "RemoveContainer" containerID="04157dee9b66b9c96469e5f0cdf517501a2e2855eb90759879ca3ded4097554c" Dec 04 15:14:55 crc kubenswrapper[4946]: I1204 15:14:55.194952 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-fjmh5_f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09/kube-multus/2.log" Dec 04 15:14:55 crc kubenswrapper[4946]: I1204 15:14:55.195498 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-fjmh5" event={"ID":"f1e6f2b4-3a68-43a0-8ca0-8b00559a9a09","Type":"ContainerStarted","Data":"943d77c5de64212aea9c4f2d95a4ff2309c95aa025a16808c21cd6949e89993d"} Dec 04 15:14:55 crc kubenswrapper[4946]: I1204 15:14:55.452019 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:14:55 crc kubenswrapper[4946]: I1204 15:14:55.452728 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:14:55 crc kubenswrapper[4946]: E1204 15:14:55.490203 4946 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-rf6pw_openshift-operators_c9624505-3974-47fb-93d7-1a2ff73b29c7_0(f72a444824b60411d55db5e54a424682df9ea20875eeed215ba899485221cd2c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 15:14:55 crc kubenswrapper[4946]: E1204 15:14:55.490295 4946 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-rf6pw_openshift-operators_c9624505-3974-47fb-93d7-1a2ff73b29c7_0(f72a444824b60411d55db5e54a424682df9ea20875eeed215ba899485221cd2c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:14:55 crc kubenswrapper[4946]: E1204 15:14:55.490320 4946 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-rf6pw_openshift-operators_c9624505-3974-47fb-93d7-1a2ff73b29c7_0(f72a444824b60411d55db5e54a424682df9ea20875eeed215ba899485221cd2c): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:14:55 crc kubenswrapper[4946]: E1204 15:14:55.490380 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-rf6pw_openshift-operators(c9624505-3974-47fb-93d7-1a2ff73b29c7)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-rf6pw_openshift-operators(c9624505-3974-47fb-93d7-1a2ff73b29c7)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-rf6pw_openshift-operators_c9624505-3974-47fb-93d7-1a2ff73b29c7_0(f72a444824b60411d55db5e54a424682df9ea20875eeed215ba899485221cd2c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" podUID="c9624505-3974-47fb-93d7-1a2ff73b29c7" Dec 04 15:14:59 crc kubenswrapper[4946]: I1204 15:14:59.387824 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-s7shz" Dec 04 15:15:00 crc kubenswrapper[4946]: I1204 15:15:00.169972 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj"] Dec 04 15:15:00 crc kubenswrapper[4946]: I1204 15:15:00.170943 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj" Dec 04 15:15:00 crc kubenswrapper[4946]: I1204 15:15:00.173800 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 04 15:15:00 crc kubenswrapper[4946]: I1204 15:15:00.174083 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 04 15:15:00 crc kubenswrapper[4946]: I1204 15:15:00.189303 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj"] Dec 04 15:15:00 crc kubenswrapper[4946]: I1204 15:15:00.244488 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2clvq\" (UniqueName: \"kubernetes.io/projected/f13c9492-c978-4a41-976b-080495c9d5fc-kube-api-access-2clvq\") pod \"collect-profiles-29414355-pvrlj\" (UID: \"f13c9492-c978-4a41-976b-080495c9d5fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj" Dec 04 15:15:00 crc kubenswrapper[4946]: I1204 15:15:00.244592 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f13c9492-c978-4a41-976b-080495c9d5fc-secret-volume\") pod \"collect-profiles-29414355-pvrlj\" (UID: \"f13c9492-c978-4a41-976b-080495c9d5fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj" Dec 04 15:15:00 crc kubenswrapper[4946]: I1204 15:15:00.244644 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f13c9492-c978-4a41-976b-080495c9d5fc-config-volume\") pod \"collect-profiles-29414355-pvrlj\" (UID: \"f13c9492-c978-4a41-976b-080495c9d5fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj" Dec 04 15:15:00 crc kubenswrapper[4946]: I1204 15:15:00.345432 4946 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2clvq\" (UniqueName: \"kubernetes.io/projected/f13c9492-c978-4a41-976b-080495c9d5fc-kube-api-access-2clvq\") pod \"collect-profiles-29414355-pvrlj\" (UID: \"f13c9492-c978-4a41-976b-080495c9d5fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj" Dec 04 15:15:00 crc kubenswrapper[4946]: I1204 15:15:00.345498 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f13c9492-c978-4a41-976b-080495c9d5fc-secret-volume\") pod \"collect-profiles-29414355-pvrlj\" (UID: \"f13c9492-c978-4a41-976b-080495c9d5fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj" Dec 04 15:15:00 crc kubenswrapper[4946]: I1204 15:15:00.345533 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f13c9492-c978-4a41-976b-080495c9d5fc-config-volume\") pod \"collect-profiles-29414355-pvrlj\" (UID: \"f13c9492-c978-4a41-976b-080495c9d5fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj" Dec 04 15:15:00 crc kubenswrapper[4946]: I1204 15:15:00.346735 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f13c9492-c978-4a41-976b-080495c9d5fc-config-volume\") pod \"collect-profiles-29414355-pvrlj\" (UID: \"f13c9492-c978-4a41-976b-080495c9d5fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj" Dec 04 15:15:00 crc kubenswrapper[4946]: I1204 15:15:00.363295 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f13c9492-c978-4a41-976b-080495c9d5fc-secret-volume\") pod \"collect-profiles-29414355-pvrlj\" (UID: \"f13c9492-c978-4a41-976b-080495c9d5fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj" Dec 04 15:15:00 crc kubenswrapper[4946]: I1204 15:15:00.369432 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2clvq\" (UniqueName: \"kubernetes.io/projected/f13c9492-c978-4a41-976b-080495c9d5fc-kube-api-access-2clvq\") pod \"collect-profiles-29414355-pvrlj\" (UID: \"f13c9492-c978-4a41-976b-080495c9d5fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj" Dec 04 15:15:00 crc kubenswrapper[4946]: I1204 15:15:00.518013 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj" Dec 04 15:15:00 crc kubenswrapper[4946]: I1204 15:15:00.764861 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj"] Dec 04 15:15:01 crc kubenswrapper[4946]: I1204 15:15:01.241766 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj" event={"ID":"f13c9492-c978-4a41-976b-080495c9d5fc","Type":"ContainerStarted","Data":"167214e85128898e21803ae3a7a4f8f5213fbed53ab84fecb3a9d5c9df16930e"} Dec 04 15:15:02 crc kubenswrapper[4946]: I1204 15:15:02.250885 4946 generic.go:334] "Generic (PLEG): container finished" podID="f13c9492-c978-4a41-976b-080495c9d5fc" containerID="7ab184c4cdd2a18d1a8c4436db03e638f01954927c3697df17d235ced7c6e270" exitCode=0 Dec 04 15:15:02 crc kubenswrapper[4946]: I1204 15:15:02.250994 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj" event={"ID":"f13c9492-c978-4a41-976b-080495c9d5fc","Type":"ContainerDied","Data":"7ab184c4cdd2a18d1a8c4436db03e638f01954927c3697df17d235ced7c6e270"} Dec 04 15:15:02 crc kubenswrapper[4946]: I1204 15:15:02.452473 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:15:02 crc kubenswrapper[4946]: I1204 15:15:02.453133 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:15:02 crc kubenswrapper[4946]: I1204 15:15:02.698861 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-bmpm5"] Dec 04 15:15:02 crc kubenswrapper[4946]: W1204 15:15:02.709587 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podadb38877_f50c_48aa_a3ca_951150033479.slice/crio-05c21168aaaef9d5290c606acb221189dd49c7367a907e0d8fe9a1782c50afbe WatchSource:0}: Error finding container 05c21168aaaef9d5290c606acb221189dd49c7367a907e0d8fe9a1782c50afbe: Status 404 returned error can't find the container with id 05c21168aaaef9d5290c606acb221189dd49c7367a907e0d8fe9a1782c50afbe Dec 04 15:15:03 crc kubenswrapper[4946]: I1204 15:15:03.259584 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" event={"ID":"adb38877-f50c-48aa-a3ca-951150033479","Type":"ContainerStarted","Data":"05c21168aaaef9d5290c606acb221189dd49c7367a907e0d8fe9a1782c50afbe"} Dec 04 15:15:03 crc kubenswrapper[4946]: I1204 15:15:03.452539 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:15:03 crc kubenswrapper[4946]: I1204 15:15:03.453079 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" Dec 04 15:15:03 crc kubenswrapper[4946]: I1204 15:15:03.521888 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj" Dec 04 15:15:03 crc kubenswrapper[4946]: I1204 15:15:03.592226 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f13c9492-c978-4a41-976b-080495c9d5fc-config-volume\") pod \"f13c9492-c978-4a41-976b-080495c9d5fc\" (UID: \"f13c9492-c978-4a41-976b-080495c9d5fc\") " Dec 04 15:15:03 crc kubenswrapper[4946]: I1204 15:15:03.592322 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f13c9492-c978-4a41-976b-080495c9d5fc-secret-volume\") pod \"f13c9492-c978-4a41-976b-080495c9d5fc\" (UID: \"f13c9492-c978-4a41-976b-080495c9d5fc\") " Dec 04 15:15:03 crc kubenswrapper[4946]: I1204 15:15:03.592363 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2clvq\" (UniqueName: \"kubernetes.io/projected/f13c9492-c978-4a41-976b-080495c9d5fc-kube-api-access-2clvq\") pod \"f13c9492-c978-4a41-976b-080495c9d5fc\" (UID: \"f13c9492-c978-4a41-976b-080495c9d5fc\") " Dec 04 15:15:03 crc kubenswrapper[4946]: I1204 15:15:03.594003 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f13c9492-c978-4a41-976b-080495c9d5fc-config-volume" (OuterVolumeSpecName: "config-volume") pod "f13c9492-c978-4a41-976b-080495c9d5fc" (UID: "f13c9492-c978-4a41-976b-080495c9d5fc"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:15:03 crc kubenswrapper[4946]: I1204 15:15:03.601445 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f13c9492-c978-4a41-976b-080495c9d5fc-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f13c9492-c978-4a41-976b-080495c9d5fc" (UID: "f13c9492-c978-4a41-976b-080495c9d5fc"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:15:03 crc kubenswrapper[4946]: I1204 15:15:03.602856 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f13c9492-c978-4a41-976b-080495c9d5fc-kube-api-access-2clvq" (OuterVolumeSpecName: "kube-api-access-2clvq") pod "f13c9492-c978-4a41-976b-080495c9d5fc" (UID: "f13c9492-c978-4a41-976b-080495c9d5fc"). InnerVolumeSpecName "kube-api-access-2clvq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:15:03 crc kubenswrapper[4946]: I1204 15:15:03.681527 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5"] Dec 04 15:15:03 crc kubenswrapper[4946]: W1204 15:15:03.689148 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod695e348c_7a92_4a69_b104_1f37361d5c49.slice/crio-e4b4f603b3fb11bd950c41b0ee9fc2dec62e493b1ccf00dcd2df75cdd8b9f749 WatchSource:0}: Error finding container e4b4f603b3fb11bd950c41b0ee9fc2dec62e493b1ccf00dcd2df75cdd8b9f749: Status 404 returned error can't find the container with id e4b4f603b3fb11bd950c41b0ee9fc2dec62e493b1ccf00dcd2df75cdd8b9f749 Dec 04 15:15:03 crc kubenswrapper[4946]: I1204 15:15:03.693397 4946 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f13c9492-c978-4a41-976b-080495c9d5fc-config-volume\") on node \"crc\" DevicePath \"\"" Dec 04 15:15:03 crc kubenswrapper[4946]: I1204 15:15:03.693422 4946 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f13c9492-c978-4a41-976b-080495c9d5fc-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 04 15:15:03 crc kubenswrapper[4946]: I1204 15:15:03.693433 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2clvq\" (UniqueName: \"kubernetes.io/projected/f13c9492-c978-4a41-976b-080495c9d5fc-kube-api-access-2clvq\") on node \"crc\" DevicePath \"\"" Dec 04 15:15:04 crc kubenswrapper[4946]: I1204 15:15:04.267612 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj" event={"ID":"f13c9492-c978-4a41-976b-080495c9d5fc","Type":"ContainerDied","Data":"167214e85128898e21803ae3a7a4f8f5213fbed53ab84fecb3a9d5c9df16930e"} Dec 04 15:15:04 crc kubenswrapper[4946]: I1204 15:15:04.267666 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="167214e85128898e21803ae3a7a4f8f5213fbed53ab84fecb3a9d5c9df16930e" Dec 04 15:15:04 crc kubenswrapper[4946]: I1204 15:15:04.267691 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj" Dec 04 15:15:04 crc kubenswrapper[4946]: I1204 15:15:04.278298 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" event={"ID":"695e348c-7a92-4a69-b104-1f37361d5c49","Type":"ContainerStarted","Data":"e4b4f603b3fb11bd950c41b0ee9fc2dec62e493b1ccf00dcd2df75cdd8b9f749"} Dec 04 15:15:07 crc kubenswrapper[4946]: I1204 15:15:07.452037 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" Dec 04 15:15:07 crc kubenswrapper[4946]: I1204 15:15:07.453358 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" Dec 04 15:15:08 crc kubenswrapper[4946]: I1204 15:15:08.452540 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:15:08 crc kubenswrapper[4946]: I1204 15:15:08.453005 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:15:08 crc kubenswrapper[4946]: I1204 15:15:08.453368 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:15:08 crc kubenswrapper[4946]: I1204 15:15:08.454013 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" Dec 04 15:15:14 crc kubenswrapper[4946]: I1204 15:15:14.612301 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-rf6pw"] Dec 04 15:15:14 crc kubenswrapper[4946]: W1204 15:15:14.625068 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc9624505_3974_47fb_93d7_1a2ff73b29c7.slice/crio-01db870041e21632ea386c3994d123fcb77ff85e9fb7655beab465bcfc3909ff WatchSource:0}: Error finding container 01db870041e21632ea386c3994d123fcb77ff85e9fb7655beab465bcfc3909ff: Status 404 returned error can't find the container with id 01db870041e21632ea386c3994d123fcb77ff85e9fb7655beab465bcfc3909ff Dec 04 15:15:14 crc kubenswrapper[4946]: I1204 15:15:14.655471 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl"] Dec 04 15:15:14 crc kubenswrapper[4946]: W1204 15:15:14.656296 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod154e09f2_667a_45bf_abdb_fc3e1f0f0ba6.slice/crio-94cd3a80fefdf71b52dc494e832780c58a2f73bcaecbe377b3308bf6027924ed WatchSource:0}: Error finding container 94cd3a80fefdf71b52dc494e832780c58a2f73bcaecbe377b3308bf6027924ed: Status 404 returned error can't find the container with id 94cd3a80fefdf71b52dc494e832780c58a2f73bcaecbe377b3308bf6027924ed Dec 04 15:15:14 crc kubenswrapper[4946]: I1204 15:15:14.665942 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr"] Dec 04 15:15:14 crc kubenswrapper[4946]: W1204 15:15:14.668574 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77a9c1a6_41d8_4285_a6ba_0aa3eb18fb11.slice/crio-f2fc11297740e5fa86c529f56afb8e77c85b27811465edcd715c79d0a7bb6f45 WatchSource:0}: Error finding container f2fc11297740e5fa86c529f56afb8e77c85b27811465edcd715c79d0a7bb6f45: Status 404 returned error can't find the container with id f2fc11297740e5fa86c529f56afb8e77c85b27811465edcd715c79d0a7bb6f45 Dec 04 15:15:15 crc kubenswrapper[4946]: I1204 15:15:15.351948 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" event={"ID":"695e348c-7a92-4a69-b104-1f37361d5c49","Type":"ContainerStarted","Data":"56457a0c1796bd7535ca5ac9d01bd2ec074be887564a9c4d1bc40412db22f570"} Dec 04 15:15:15 crc kubenswrapper[4946]: I1204 15:15:15.354199 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" event={"ID":"c9624505-3974-47fb-93d7-1a2ff73b29c7","Type":"ContainerStarted","Data":"01db870041e21632ea386c3994d123fcb77ff85e9fb7655beab465bcfc3909ff"} Dec 04 15:15:15 crc kubenswrapper[4946]: I1204 15:15:15.357888 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" event={"ID":"adb38877-f50c-48aa-a3ca-951150033479","Type":"ContainerStarted","Data":"bcab0d868fc54616c205bc4a8df378899b6a54733dcf3cef1de670a47ea58bf4"} Dec 04 15:15:15 crc kubenswrapper[4946]: I1204 15:15:15.358084 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:15:15 crc kubenswrapper[4946]: I1204 15:15:15.359170 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" event={"ID":"154e09f2-667a-45bf-abdb-fc3e1f0f0ba6","Type":"ContainerStarted","Data":"94cd3a80fefdf71b52dc494e832780c58a2f73bcaecbe377b3308bf6027924ed"} Dec 04 15:15:15 crc kubenswrapper[4946]: I1204 15:15:15.363864 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" event={"ID":"77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11","Type":"ContainerStarted","Data":"32f3c015ee616c3ae6f5ee08ecda8d6aede92fa1e7a404f5adb286fa89a046af"} Dec 04 15:15:15 crc kubenswrapper[4946]: I1204 15:15:15.364109 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" event={"ID":"77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11","Type":"ContainerStarted","Data":"f2fc11297740e5fa86c529f56afb8e77c85b27811465edcd715c79d0a7bb6f45"} Dec 04 15:15:15 crc kubenswrapper[4946]: I1204 15:15:15.365498 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" Dec 04 15:15:15 crc kubenswrapper[4946]: I1204 15:15:15.378444 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5" podStartSLOduration=32.908952836 podStartE2EDuration="43.378421533s" podCreationTimestamp="2025-12-04 15:14:32 +0000 UTC" firstStartedPulling="2025-12-04 15:15:03.694410059 +0000 UTC m=+754.580453700" lastFinishedPulling="2025-12-04 15:15:14.163878756 +0000 UTC m=+765.049922397" observedRunningTime="2025-12-04 15:15:15.373012919 +0000 UTC m=+766.259056560" watchObservedRunningTime="2025-12-04 15:15:15.378421533 +0000 UTC m=+766.264465164" Dec 04 15:15:15 crc kubenswrapper[4946]: I1204 15:15:15.412361 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr" podStartSLOduration=43.412333881 podStartE2EDuration="43.412333881s" podCreationTimestamp="2025-12-04 15:14:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:15:15.407688727 +0000 UTC m=+766.293732368" watchObservedRunningTime="2025-12-04 15:15:15.412333881 +0000 UTC m=+766.298377542" Dec 04 15:15:15 crc kubenswrapper[4946]: I1204 15:15:15.443836 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-bmpm5" podStartSLOduration=31.989089098 podStartE2EDuration="43.443808564s" podCreationTimestamp="2025-12-04 15:14:32 +0000 UTC" firstStartedPulling="2025-12-04 15:15:02.712106979 +0000 UTC m=+753.598150620" lastFinishedPulling="2025-12-04 15:15:14.166826445 +0000 UTC m=+765.052870086" observedRunningTime="2025-12-04 15:15:15.436269942 +0000 UTC m=+766.322313583" watchObservedRunningTime="2025-12-04 15:15:15.443808564 
+0000 UTC m=+766.329852205" Dec 04 15:15:18 crc kubenswrapper[4946]: I1204 15:15:18.387522 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" event={"ID":"c9624505-3974-47fb-93d7-1a2ff73b29c7","Type":"ContainerStarted","Data":"628da4b894914e9bd35f5f8ca0ea98abf3f685b2ab33922b178f42a100b0bde5"} Dec 04 15:15:18 crc kubenswrapper[4946]: I1204 15:15:18.388338 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:15:18 crc kubenswrapper[4946]: I1204 15:15:18.390231 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" event={"ID":"154e09f2-667a-45bf-abdb-fc3e1f0f0ba6","Type":"ContainerStarted","Data":"c2bedcbcbec001efdf63323253b8abafd39224f58e519c029db029daa884372d"} Dec 04 15:15:18 crc kubenswrapper[4946]: I1204 15:15:18.416383 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" podStartSLOduration=43.442240872 podStartE2EDuration="46.416360888s" podCreationTimestamp="2025-12-04 15:14:32 +0000 UTC" firstStartedPulling="2025-12-04 15:15:14.628146236 +0000 UTC m=+765.514189877" lastFinishedPulling="2025-12-04 15:15:17.602266252 +0000 UTC m=+768.488309893" observedRunningTime="2025-12-04 15:15:18.411844327 +0000 UTC m=+769.297887988" watchObservedRunningTime="2025-12-04 15:15:18.416360888 +0000 UTC m=+769.302404529" Dec 04 15:15:18 crc kubenswrapper[4946]: I1204 15:15:18.437860 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-tjsxl" podStartSLOduration=43.50227383 podStartE2EDuration="46.437837793s" podCreationTimestamp="2025-12-04 15:14:32 +0000 UTC" firstStartedPulling="2025-12-04 15:15:14.661190571 +0000 UTC m=+765.547234212" lastFinishedPulling="2025-12-04 15:15:17.596754534 +0000 UTC m=+768.482798175" observedRunningTime="2025-12-04 15:15:18.434186855 +0000 UTC m=+769.320230506" watchObservedRunningTime="2025-12-04 15:15:18.437837793 +0000 UTC m=+769.323881434" Dec 04 15:15:21 crc kubenswrapper[4946]: I1204 15:15:21.969207 4946 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 04 15:15:22 crc kubenswrapper[4946]: I1204 15:15:22.478390 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:15:22 crc kubenswrapper[4946]: I1204 15:15:22.478486 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:15:22 crc kubenswrapper[4946]: I1204 15:15:22.996973 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-rf6pw" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.602074 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-kd8gk"] Dec 04 15:15:24 crc kubenswrapper[4946]: E1204 15:15:24.602921 4946 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f13c9492-c978-4a41-976b-080495c9d5fc" containerName="collect-profiles" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.602939 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f13c9492-c978-4a41-976b-080495c9d5fc" containerName="collect-profiles" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.603049 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f13c9492-c978-4a41-976b-080495c9d5fc" containerName="collect-profiles" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.603628 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-kd8gk" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.606732 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.616104 4946 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-zdmlr" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.616128 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.622479 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-kd8gk"] Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.628152 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-cr2wp"] Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.629074 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-cr2wp" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.631584 4946 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-92pb7" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.638598 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-dd4pw"] Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.639879 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-dd4pw" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.642877 4946 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-4pcmc" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.646083 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-cr2wp"] Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.666901 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-dd4pw"] Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.697527 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52ffc\" (UniqueName: \"kubernetes.io/projected/bb8b188e-8662-4027-9493-886326967ed1-kube-api-access-52ffc\") pod \"cert-manager-cainjector-7f985d654d-kd8gk\" (UID: \"bb8b188e-8662-4027-9493-886326967ed1\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-kd8gk" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.697700 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75v98\" (UniqueName: \"kubernetes.io/projected/3611b347-1802-4635-8abd-47d9a6f4ad29-kube-api-access-75v98\") pod \"cert-manager-webhook-5655c58dd6-dd4pw\" (UID: \"3611b347-1802-4635-8abd-47d9a6f4ad29\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-dd4pw" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.697838 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsht7\" (UniqueName: \"kubernetes.io/projected/41b2bdd0-54fd-436e-a498-056e3fdd6934-kube-api-access-lsht7\") pod \"cert-manager-5b446d88c5-cr2wp\" (UID: \"41b2bdd0-54fd-436e-a498-056e3fdd6934\") " pod="cert-manager/cert-manager-5b446d88c5-cr2wp" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.798761 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75v98\" (UniqueName: \"kubernetes.io/projected/3611b347-1802-4635-8abd-47d9a6f4ad29-kube-api-access-75v98\") pod \"cert-manager-webhook-5655c58dd6-dd4pw\" (UID: \"3611b347-1802-4635-8abd-47d9a6f4ad29\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-dd4pw" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.798857 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsht7\" (UniqueName: \"kubernetes.io/projected/41b2bdd0-54fd-436e-a498-056e3fdd6934-kube-api-access-lsht7\") pod \"cert-manager-5b446d88c5-cr2wp\" (UID: \"41b2bdd0-54fd-436e-a498-056e3fdd6934\") " pod="cert-manager/cert-manager-5b446d88c5-cr2wp" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.798939 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52ffc\" (UniqueName: \"kubernetes.io/projected/bb8b188e-8662-4027-9493-886326967ed1-kube-api-access-52ffc\") pod \"cert-manager-cainjector-7f985d654d-kd8gk\" (UID: \"bb8b188e-8662-4027-9493-886326967ed1\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-kd8gk" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.818707 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75v98\" (UniqueName: \"kubernetes.io/projected/3611b347-1802-4635-8abd-47d9a6f4ad29-kube-api-access-75v98\") pod \"cert-manager-webhook-5655c58dd6-dd4pw\" (UID: \"3611b347-1802-4635-8abd-47d9a6f4ad29\") " 
pod="cert-manager/cert-manager-webhook-5655c58dd6-dd4pw" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.818936 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52ffc\" (UniqueName: \"kubernetes.io/projected/bb8b188e-8662-4027-9493-886326967ed1-kube-api-access-52ffc\") pod \"cert-manager-cainjector-7f985d654d-kd8gk\" (UID: \"bb8b188e-8662-4027-9493-886326967ed1\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-kd8gk" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.818968 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsht7\" (UniqueName: \"kubernetes.io/projected/41b2bdd0-54fd-436e-a498-056e3fdd6934-kube-api-access-lsht7\") pod \"cert-manager-5b446d88c5-cr2wp\" (UID: \"41b2bdd0-54fd-436e-a498-056e3fdd6934\") " pod="cert-manager/cert-manager-5b446d88c5-cr2wp" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.924043 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-kd8gk" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.943192 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-cr2wp" Dec 04 15:15:24 crc kubenswrapper[4946]: I1204 15:15:24.954462 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-dd4pw" Dec 04 15:15:25 crc kubenswrapper[4946]: I1204 15:15:25.737383 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-dd4pw"] Dec 04 15:15:25 crc kubenswrapper[4946]: W1204 15:15:25.750916 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3611b347_1802_4635_8abd_47d9a6f4ad29.slice/crio-1abedb8d62ceea5eef52988da176b8dccf85b2ac0b81f9b25df40cd26977fd0b WatchSource:0}: Error finding container 1abedb8d62ceea5eef52988da176b8dccf85b2ac0b81f9b25df40cd26977fd0b: Status 404 returned error can't find the container with id 1abedb8d62ceea5eef52988da176b8dccf85b2ac0b81f9b25df40cd26977fd0b Dec 04 15:15:25 crc kubenswrapper[4946]: I1204 15:15:25.798882 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-cr2wp"] Dec 04 15:15:25 crc kubenswrapper[4946]: I1204 15:15:25.900814 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-kd8gk"] Dec 04 15:15:26 crc kubenswrapper[4946]: I1204 15:15:26.436774 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-dd4pw" event={"ID":"3611b347-1802-4635-8abd-47d9a6f4ad29","Type":"ContainerStarted","Data":"1abedb8d62ceea5eef52988da176b8dccf85b2ac0b81f9b25df40cd26977fd0b"} Dec 04 15:15:26 crc kubenswrapper[4946]: I1204 15:15:26.444065 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-kd8gk" event={"ID":"bb8b188e-8662-4027-9493-886326967ed1","Type":"ContainerStarted","Data":"f7242f18f36933d589d79de1d872b90d482e8f476a22a4883877c21b6f6fb8f8"} Dec 04 15:15:26 crc kubenswrapper[4946]: I1204 15:15:26.483708 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-cr2wp" event={"ID":"41b2bdd0-54fd-436e-a498-056e3fdd6934","Type":"ContainerStarted","Data":"ff0b303cfee41b7fe659bcd5fde9b8daf008517f515186c491d4d49bebf50e21"} Dec 04 15:15:32 crc kubenswrapper[4946]: I1204 
15:15:32.537488 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-dd4pw" event={"ID":"3611b347-1802-4635-8abd-47d9a6f4ad29","Type":"ContainerStarted","Data":"c9f7149300ebac9ba7e7aa0b30c16db1669fd179547603328649b202c4584142"} Dec 04 15:15:32 crc kubenswrapper[4946]: I1204 15:15:32.539080 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-dd4pw" Dec 04 15:15:32 crc kubenswrapper[4946]: I1204 15:15:32.540483 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-kd8gk" event={"ID":"bb8b188e-8662-4027-9493-886326967ed1","Type":"ContainerStarted","Data":"29b4ecdc3a3e576da946109939696e0b758e34638a66e0383ff63dc1af4aed88"} Dec 04 15:15:32 crc kubenswrapper[4946]: I1204 15:15:32.541930 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-cr2wp" event={"ID":"41b2bdd0-54fd-436e-a498-056e3fdd6934","Type":"ContainerStarted","Data":"99f00f52542c7e3b01b2f409410f88e4f979223c961a89c5bc27ad00bedb2d14"} Dec 04 15:15:32 crc kubenswrapper[4946]: I1204 15:15:32.562331 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-dd4pw" podStartSLOduration=2.697505877 podStartE2EDuration="8.562300504s" podCreationTimestamp="2025-12-04 15:15:24 +0000 UTC" firstStartedPulling="2025-12-04 15:15:25.753883545 +0000 UTC m=+776.639927176" lastFinishedPulling="2025-12-04 15:15:31.618678162 +0000 UTC m=+782.504721803" observedRunningTime="2025-12-04 15:15:32.556617314 +0000 UTC m=+783.442660955" watchObservedRunningTime="2025-12-04 15:15:32.562300504 +0000 UTC m=+783.448344145" Dec 04 15:15:32 crc kubenswrapper[4946]: I1204 15:15:32.578100 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-cr2wp" podStartSLOduration=2.704097337 podStartE2EDuration="8.578070522s" podCreationTimestamp="2025-12-04 15:15:24 +0000 UTC" firstStartedPulling="2025-12-04 15:15:25.832376906 +0000 UTC m=+776.718420547" lastFinishedPulling="2025-12-04 15:15:31.706350091 +0000 UTC m=+782.592393732" observedRunningTime="2025-12-04 15:15:32.576905631 +0000 UTC m=+783.462949272" watchObservedRunningTime="2025-12-04 15:15:32.578070522 +0000 UTC m=+783.464114163" Dec 04 15:15:32 crc kubenswrapper[4946]: I1204 15:15:32.628696 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-kd8gk" podStartSLOduration=2.919578612 podStartE2EDuration="8.6286581s" podCreationTimestamp="2025-12-04 15:15:24 +0000 UTC" firstStartedPulling="2025-12-04 15:15:25.911058463 +0000 UTC m=+776.797102104" lastFinishedPulling="2025-12-04 15:15:31.620137951 +0000 UTC m=+782.506181592" observedRunningTime="2025-12-04 15:15:32.621517141 +0000 UTC m=+783.507560782" watchObservedRunningTime="2025-12-04 15:15:32.6286581 +0000 UTC m=+783.514701741" Dec 04 15:15:39 crc kubenswrapper[4946]: I1204 15:15:39.959335 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-dd4pw" Dec 04 15:15:52 crc kubenswrapper[4946]: I1204 15:15:52.478512 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Dec 04 15:15:52 crc kubenswrapper[4946]: I1204 15:15:52.479956 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:16:11 crc kubenswrapper[4946]: I1204 15:16:11.660698 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk"] Dec 04 15:16:11 crc kubenswrapper[4946]: I1204 15:16:11.662812 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk" Dec 04 15:16:11 crc kubenswrapper[4946]: I1204 15:16:11.665364 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 04 15:16:11 crc kubenswrapper[4946]: I1204 15:16:11.676550 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk"] Dec 04 15:16:11 crc kubenswrapper[4946]: I1204 15:16:11.741066 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/768219c3-3efe-4a2f-9ac3-55cd3247166e-bundle\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk\" (UID: \"768219c3-3efe-4a2f-9ac3-55cd3247166e\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk" Dec 04 15:16:11 crc kubenswrapper[4946]: I1204 15:16:11.741176 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/768219c3-3efe-4a2f-9ac3-55cd3247166e-util\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk\" (UID: \"768219c3-3efe-4a2f-9ac3-55cd3247166e\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk" Dec 04 15:16:11 crc kubenswrapper[4946]: I1204 15:16:11.741208 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vttws\" (UniqueName: \"kubernetes.io/projected/768219c3-3efe-4a2f-9ac3-55cd3247166e-kube-api-access-vttws\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk\" (UID: \"768219c3-3efe-4a2f-9ac3-55cd3247166e\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk" Dec 04 15:16:11 crc kubenswrapper[4946]: I1204 15:16:11.842640 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/768219c3-3efe-4a2f-9ac3-55cd3247166e-bundle\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk\" (UID: \"768219c3-3efe-4a2f-9ac3-55cd3247166e\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk" Dec 04 15:16:11 crc kubenswrapper[4946]: I1204 15:16:11.842754 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/768219c3-3efe-4a2f-9ac3-55cd3247166e-util\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk\" (UID: \"768219c3-3efe-4a2f-9ac3-55cd3247166e\") " 
pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk" Dec 04 15:16:11 crc kubenswrapper[4946]: I1204 15:16:11.842785 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vttws\" (UniqueName: \"kubernetes.io/projected/768219c3-3efe-4a2f-9ac3-55cd3247166e-kube-api-access-vttws\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk\" (UID: \"768219c3-3efe-4a2f-9ac3-55cd3247166e\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk" Dec 04 15:16:11 crc kubenswrapper[4946]: I1204 15:16:11.843363 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/768219c3-3efe-4a2f-9ac3-55cd3247166e-bundle\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk\" (UID: \"768219c3-3efe-4a2f-9ac3-55cd3247166e\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk" Dec 04 15:16:11 crc kubenswrapper[4946]: I1204 15:16:11.843499 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/768219c3-3efe-4a2f-9ac3-55cd3247166e-util\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk\" (UID: \"768219c3-3efe-4a2f-9ac3-55cd3247166e\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk" Dec 04 15:16:11 crc kubenswrapper[4946]: I1204 15:16:11.865051 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vttws\" (UniqueName: \"kubernetes.io/projected/768219c3-3efe-4a2f-9ac3-55cd3247166e-kube-api-access-vttws\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk\" (UID: \"768219c3-3efe-4a2f-9ac3-55cd3247166e\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk" Dec 04 15:16:11 crc kubenswrapper[4946]: I1204 15:16:11.978641 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk" Dec 04 15:16:12 crc kubenswrapper[4946]: I1204 15:16:12.228470 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk"] Dec 04 15:16:12 crc kubenswrapper[4946]: I1204 15:16:12.809225 4946 generic.go:334] "Generic (PLEG): container finished" podID="768219c3-3efe-4a2f-9ac3-55cd3247166e" containerID="947224a727f22c716223d446935ea4442a435c35e5751cfe9fc9a66beec5fd96" exitCode=0 Dec 04 15:16:12 crc kubenswrapper[4946]: I1204 15:16:12.809294 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk" event={"ID":"768219c3-3efe-4a2f-9ac3-55cd3247166e","Type":"ContainerDied","Data":"947224a727f22c716223d446935ea4442a435c35e5751cfe9fc9a66beec5fd96"} Dec 04 15:16:12 crc kubenswrapper[4946]: I1204 15:16:12.809335 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk" event={"ID":"768219c3-3efe-4a2f-9ac3-55cd3247166e","Type":"ContainerStarted","Data":"f283470ee355cbf3e14c5f1b4cbc223764de893742fa5a798f235c1557f77721"} Dec 04 15:16:13 crc kubenswrapper[4946]: I1204 15:16:13.846097 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qrndj"] Dec 04 15:16:13 crc kubenswrapper[4946]: I1204 15:16:13.850005 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qrndj" Dec 04 15:16:13 crc kubenswrapper[4946]: I1204 15:16:13.864210 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qrndj"] Dec 04 15:16:13 crc kubenswrapper[4946]: I1204 15:16:13.983708 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5846224f-c43c-4c15-b5aa-596e19535a4c-catalog-content\") pod \"redhat-operators-qrndj\" (UID: \"5846224f-c43c-4c15-b5aa-596e19535a4c\") " pod="openshift-marketplace/redhat-operators-qrndj" Dec 04 15:16:13 crc kubenswrapper[4946]: I1204 15:16:13.983782 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvh2j\" (UniqueName: \"kubernetes.io/projected/5846224f-c43c-4c15-b5aa-596e19535a4c-kube-api-access-rvh2j\") pod \"redhat-operators-qrndj\" (UID: \"5846224f-c43c-4c15-b5aa-596e19535a4c\") " pod="openshift-marketplace/redhat-operators-qrndj" Dec 04 15:16:13 crc kubenswrapper[4946]: I1204 15:16:13.984053 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5846224f-c43c-4c15-b5aa-596e19535a4c-utilities\") pod \"redhat-operators-qrndj\" (UID: \"5846224f-c43c-4c15-b5aa-596e19535a4c\") " pod="openshift-marketplace/redhat-operators-qrndj" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.085385 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5846224f-c43c-4c15-b5aa-596e19535a4c-catalog-content\") pod \"redhat-operators-qrndj\" (UID: \"5846224f-c43c-4c15-b5aa-596e19535a4c\") " pod="openshift-marketplace/redhat-operators-qrndj" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.085454 4946 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-rvh2j\" (UniqueName: \"kubernetes.io/projected/5846224f-c43c-4c15-b5aa-596e19535a4c-kube-api-access-rvh2j\") pod \"redhat-operators-qrndj\" (UID: \"5846224f-c43c-4c15-b5aa-596e19535a4c\") " pod="openshift-marketplace/redhat-operators-qrndj" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.085508 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5846224f-c43c-4c15-b5aa-596e19535a4c-utilities\") pod \"redhat-operators-qrndj\" (UID: \"5846224f-c43c-4c15-b5aa-596e19535a4c\") " pod="openshift-marketplace/redhat-operators-qrndj" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.086705 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5846224f-c43c-4c15-b5aa-596e19535a4c-utilities\") pod \"redhat-operators-qrndj\" (UID: \"5846224f-c43c-4c15-b5aa-596e19535a4c\") " pod="openshift-marketplace/redhat-operators-qrndj" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.086809 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5846224f-c43c-4c15-b5aa-596e19535a4c-catalog-content\") pod \"redhat-operators-qrndj\" (UID: \"5846224f-c43c-4c15-b5aa-596e19535a4c\") " pod="openshift-marketplace/redhat-operators-qrndj" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.114469 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvh2j\" (UniqueName: \"kubernetes.io/projected/5846224f-c43c-4c15-b5aa-596e19535a4c-kube-api-access-rvh2j\") pod \"redhat-operators-qrndj\" (UID: \"5846224f-c43c-4c15-b5aa-596e19535a4c\") " pod="openshift-marketplace/redhat-operators-qrndj" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.174725 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qrndj" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.432410 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qrndj"] Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.590033 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["minio-dev/minio"] Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.591891 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="minio-dev/minio" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.595165 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"kube-root-ca.crt" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.595362 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"openshift-service-ca.crt" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.596372 4946 reflector.go:368] Caches populated for *v1.Secret from object-"minio-dev"/"default-dockercfg-7hcjt" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.602676 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.693484 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-437b3b22-d332-4ab0-bd5d-5d240067ef27\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-437b3b22-d332-4ab0-bd5d-5d240067ef27\") pod \"minio\" (UID: \"a6a750c6-cb91-42aa-ab70-5ac5879c6774\") " pod="minio-dev/minio" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.693750 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6pjq\" (UniqueName: \"kubernetes.io/projected/a6a750c6-cb91-42aa-ab70-5ac5879c6774-kube-api-access-b6pjq\") pod \"minio\" (UID: \"a6a750c6-cb91-42aa-ab70-5ac5879c6774\") " pod="minio-dev/minio" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.795454 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-437b3b22-d332-4ab0-bd5d-5d240067ef27\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-437b3b22-d332-4ab0-bd5d-5d240067ef27\") pod \"minio\" (UID: \"a6a750c6-cb91-42aa-ab70-5ac5879c6774\") " pod="minio-dev/minio" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.795553 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6pjq\" (UniqueName: \"kubernetes.io/projected/a6a750c6-cb91-42aa-ab70-5ac5879c6774-kube-api-access-b6pjq\") pod \"minio\" (UID: \"a6a750c6-cb91-42aa-ab70-5ac5879c6774\") " pod="minio-dev/minio" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.802726 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.802787 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-437b3b22-d332-4ab0-bd5d-5d240067ef27\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-437b3b22-d332-4ab0-bd5d-5d240067ef27\") pod \"minio\" (UID: \"a6a750c6-cb91-42aa-ab70-5ac5879c6774\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/7cb7c3654810592e1ed9dec522659ce294ad5fc62a03bea08aa29267077bccca/globalmount\"" pod="minio-dev/minio" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.821830 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6pjq\" (UniqueName: \"kubernetes.io/projected/a6a750c6-cb91-42aa-ab70-5ac5879c6774-kube-api-access-b6pjq\") pod \"minio\" (UID: \"a6a750c6-cb91-42aa-ab70-5ac5879c6774\") " pod="minio-dev/minio" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.837644 4946 generic.go:334] "Generic (PLEG): container finished" podID="768219c3-3efe-4a2f-9ac3-55cd3247166e" containerID="1452e8c3dde80f4289f45949dc8075dbfc7b1df03e96ba5b8188a267706984f8" exitCode=0 Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.837741 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk" event={"ID":"768219c3-3efe-4a2f-9ac3-55cd3247166e","Type":"ContainerDied","Data":"1452e8c3dde80f4289f45949dc8075dbfc7b1df03e96ba5b8188a267706984f8"} Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.841072 4946 generic.go:334] "Generic (PLEG): container finished" podID="5846224f-c43c-4c15-b5aa-596e19535a4c" containerID="00c50aaeda450cb2cd05ea9ad76af73ec767118ac3cca43e2f2e361088115da8" exitCode=0 Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.841104 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrndj" event={"ID":"5846224f-c43c-4c15-b5aa-596e19535a4c","Type":"ContainerDied","Data":"00c50aaeda450cb2cd05ea9ad76af73ec767118ac3cca43e2f2e361088115da8"} Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.841154 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrndj" event={"ID":"5846224f-c43c-4c15-b5aa-596e19535a4c","Type":"ContainerStarted","Data":"7f67c9d5f07f4e679976048c46747c4100c7b10a8626a6ee2ab9dbb76cb872bd"} Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.842975 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-437b3b22-d332-4ab0-bd5d-5d240067ef27\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-437b3b22-d332-4ab0-bd5d-5d240067ef27\") pod \"minio\" (UID: \"a6a750c6-cb91-42aa-ab70-5ac5879c6774\") " pod="minio-dev/minio" Dec 04 15:16:14 crc kubenswrapper[4946]: I1204 15:16:14.972250 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="minio-dev/minio" Dec 04 15:16:15 crc kubenswrapper[4946]: I1204 15:16:15.267247 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Dec 04 15:16:15 crc kubenswrapper[4946]: I1204 15:16:15.850450 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrndj" event={"ID":"5846224f-c43c-4c15-b5aa-596e19535a4c","Type":"ContainerStarted","Data":"7ee83a67e650a78796aa29579bde5dd9bea407bf85ba93f88fea2743d84af184"} Dec 04 15:16:15 crc kubenswrapper[4946]: I1204 15:16:15.851830 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"a6a750c6-cb91-42aa-ab70-5ac5879c6774","Type":"ContainerStarted","Data":"3f21ccde5efa3198f6bae3f8e041438232c6c207cc25f3aa54783117bf496596"} Dec 04 15:16:15 crc kubenswrapper[4946]: I1204 15:16:15.854743 4946 generic.go:334] "Generic (PLEG): container finished" podID="768219c3-3efe-4a2f-9ac3-55cd3247166e" containerID="a44e418befd7ce27e9165dbb33db3ab0190db63a7ed54cd0ec36eccea2047382" exitCode=0 Dec 04 15:16:15 crc kubenswrapper[4946]: I1204 15:16:15.854791 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk" event={"ID":"768219c3-3efe-4a2f-9ac3-55cd3247166e","Type":"ContainerDied","Data":"a44e418befd7ce27e9165dbb33db3ab0190db63a7ed54cd0ec36eccea2047382"} Dec 04 15:16:16 crc kubenswrapper[4946]: I1204 15:16:16.863778 4946 generic.go:334] "Generic (PLEG): container finished" podID="5846224f-c43c-4c15-b5aa-596e19535a4c" containerID="7ee83a67e650a78796aa29579bde5dd9bea407bf85ba93f88fea2743d84af184" exitCode=0 Dec 04 15:16:16 crc kubenswrapper[4946]: I1204 15:16:16.864532 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrndj" event={"ID":"5846224f-c43c-4c15-b5aa-596e19535a4c","Type":"ContainerDied","Data":"7ee83a67e650a78796aa29579bde5dd9bea407bf85ba93f88fea2743d84af184"} Dec 04 15:16:17 crc kubenswrapper[4946]: I1204 15:16:17.468329 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk" Dec 04 15:16:17 crc kubenswrapper[4946]: I1204 15:16:17.578877 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/768219c3-3efe-4a2f-9ac3-55cd3247166e-bundle\") pod \"768219c3-3efe-4a2f-9ac3-55cd3247166e\" (UID: \"768219c3-3efe-4a2f-9ac3-55cd3247166e\") " Dec 04 15:16:17 crc kubenswrapper[4946]: I1204 15:16:17.578971 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vttws\" (UniqueName: \"kubernetes.io/projected/768219c3-3efe-4a2f-9ac3-55cd3247166e-kube-api-access-vttws\") pod \"768219c3-3efe-4a2f-9ac3-55cd3247166e\" (UID: \"768219c3-3efe-4a2f-9ac3-55cd3247166e\") " Dec 04 15:16:17 crc kubenswrapper[4946]: I1204 15:16:17.579087 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/768219c3-3efe-4a2f-9ac3-55cd3247166e-util\") pod \"768219c3-3efe-4a2f-9ac3-55cd3247166e\" (UID: \"768219c3-3efe-4a2f-9ac3-55cd3247166e\") " Dec 04 15:16:17 crc kubenswrapper[4946]: I1204 15:16:17.580794 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/768219c3-3efe-4a2f-9ac3-55cd3247166e-bundle" (OuterVolumeSpecName: "bundle") pod "768219c3-3efe-4a2f-9ac3-55cd3247166e" (UID: "768219c3-3efe-4a2f-9ac3-55cd3247166e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:16:17 crc kubenswrapper[4946]: I1204 15:16:17.593181 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/768219c3-3efe-4a2f-9ac3-55cd3247166e-kube-api-access-vttws" (OuterVolumeSpecName: "kube-api-access-vttws") pod "768219c3-3efe-4a2f-9ac3-55cd3247166e" (UID: "768219c3-3efe-4a2f-9ac3-55cd3247166e"). InnerVolumeSpecName "kube-api-access-vttws". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:16:17 crc kubenswrapper[4946]: I1204 15:16:17.595624 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/768219c3-3efe-4a2f-9ac3-55cd3247166e-util" (OuterVolumeSpecName: "util") pod "768219c3-3efe-4a2f-9ac3-55cd3247166e" (UID: "768219c3-3efe-4a2f-9ac3-55cd3247166e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:16:17 crc kubenswrapper[4946]: I1204 15:16:17.680531 4946 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/768219c3-3efe-4a2f-9ac3-55cd3247166e-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:16:17 crc kubenswrapper[4946]: I1204 15:16:17.680572 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vttws\" (UniqueName: \"kubernetes.io/projected/768219c3-3efe-4a2f-9ac3-55cd3247166e-kube-api-access-vttws\") on node \"crc\" DevicePath \"\"" Dec 04 15:16:17 crc kubenswrapper[4946]: I1204 15:16:17.680585 4946 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/768219c3-3efe-4a2f-9ac3-55cd3247166e-util\") on node \"crc\" DevicePath \"\"" Dec 04 15:16:17 crc kubenswrapper[4946]: I1204 15:16:17.874723 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk" event={"ID":"768219c3-3efe-4a2f-9ac3-55cd3247166e","Type":"ContainerDied","Data":"f283470ee355cbf3e14c5f1b4cbc223764de893742fa5a798f235c1557f77721"} Dec 04 15:16:17 crc kubenswrapper[4946]: I1204 15:16:17.874774 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f283470ee355cbf3e14c5f1b4cbc223764de893742fa5a798f235c1557f77721" Dec 04 15:16:17 crc kubenswrapper[4946]: I1204 15:16:17.874857 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk" Dec 04 15:16:19 crc kubenswrapper[4946]: I1204 15:16:19.892626 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrndj" event={"ID":"5846224f-c43c-4c15-b5aa-596e19535a4c","Type":"ContainerStarted","Data":"bbe000437c84fe84557ce6862eb115ed856dcc6822c38fc66a907160d33c2dd0"} Dec 04 15:16:19 crc kubenswrapper[4946]: I1204 15:16:19.895268 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"a6a750c6-cb91-42aa-ab70-5ac5879c6774","Type":"ContainerStarted","Data":"3d52dbca93d87459db1379f311e151f939540fc5e165f31d03ecb4fa3db8137a"} Dec 04 15:16:19 crc kubenswrapper[4946]: I1204 15:16:19.938687 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="minio-dev/minio" podStartSLOduration=4.385646937 podStartE2EDuration="7.938636857s" podCreationTimestamp="2025-12-04 15:16:12 +0000 UTC" firstStartedPulling="2025-12-04 15:16:15.337563961 +0000 UTC m=+826.223607602" lastFinishedPulling="2025-12-04 15:16:18.890553881 +0000 UTC m=+829.776597522" observedRunningTime="2025-12-04 15:16:19.936701646 +0000 UTC m=+830.822745287" watchObservedRunningTime="2025-12-04 15:16:19.938636857 +0000 UTC m=+830.824680508" Dec 04 15:16:19 crc kubenswrapper[4946]: I1204 15:16:19.943183 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qrndj" podStartSLOduration=2.918500709 podStartE2EDuration="6.943157766s" podCreationTimestamp="2025-12-04 15:16:13 +0000 UTC" firstStartedPulling="2025-12-04 15:16:14.842868175 +0000 UTC m=+825.728911816" lastFinishedPulling="2025-12-04 15:16:18.867525232 +0000 UTC m=+829.753568873" observedRunningTime="2025-12-04 15:16:19.91985214 +0000 UTC m=+830.805895781" watchObservedRunningTime="2025-12-04 15:16:19.943157766 +0000 UTC m=+830.829201417" Dec 04 15:16:22 crc kubenswrapper[4946]: I1204 15:16:22.478594 
4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:16:22 crc kubenswrapper[4946]: I1204 15:16:22.478669 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:16:22 crc kubenswrapper[4946]: I1204 15:16:22.478716 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:16:22 crc kubenswrapper[4946]: I1204 15:16:22.479233 4946 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4a5d66e82f6e47d86ff02c63e947477c5c01ef6d0d42318658f454eca8014377"} pod="openshift-machine-config-operator/machine-config-daemon-qhv79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 15:16:22 crc kubenswrapper[4946]: I1204 15:16:22.479292 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" containerID="cri-o://4a5d66e82f6e47d86ff02c63e947477c5c01ef6d0d42318658f454eca8014377" gracePeriod=600 Dec 04 15:16:23 crc kubenswrapper[4946]: I1204 15:16:23.944179 4946 generic.go:334] "Generic (PLEG): container finished" podID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerID="4a5d66e82f6e47d86ff02c63e947477c5c01ef6d0d42318658f454eca8014377" exitCode=0 Dec 04 15:16:23 crc kubenswrapper[4946]: I1204 15:16:23.944280 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerDied","Data":"4a5d66e82f6e47d86ff02c63e947477c5c01ef6d0d42318658f454eca8014377"} Dec 04 15:16:23 crc kubenswrapper[4946]: I1204 15:16:23.945269 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"53eeb1a5a8af1654e1978db4066dd9d62d695280b47fdbadb0ee39d16803c85c"} Dec 04 15:16:23 crc kubenswrapper[4946]: I1204 15:16:23.945308 4946 scope.go:117] "RemoveContainer" containerID="e87ab8d53f5aa9b89123a8ac882de8b8a67a0f850ea3ff58023f9e8d50c74ba2" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.176015 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qrndj" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.176101 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qrndj" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.669263 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct"] Dec 04 15:16:24 crc kubenswrapper[4946]: E1204 15:16:24.670241 4946 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="768219c3-3efe-4a2f-9ac3-55cd3247166e" containerName="pull" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.670261 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="768219c3-3efe-4a2f-9ac3-55cd3247166e" containerName="pull" Dec 04 15:16:24 crc kubenswrapper[4946]: E1204 15:16:24.670273 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="768219c3-3efe-4a2f-9ac3-55cd3247166e" containerName="extract" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.670281 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="768219c3-3efe-4a2f-9ac3-55cd3247166e" containerName="extract" Dec 04 15:16:24 crc kubenswrapper[4946]: E1204 15:16:24.670303 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="768219c3-3efe-4a2f-9ac3-55cd3247166e" containerName="util" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.670314 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="768219c3-3efe-4a2f-9ac3-55cd3247166e" containerName="util" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.670499 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="768219c3-3efe-4a2f-9ac3-55cd3247166e" containerName="extract" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.671619 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.692658 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-metrics" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.692922 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"openshift-service-ca.crt" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.693154 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"kube-root-ca.crt" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.693374 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-dockercfg-ppvzl" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.696580 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-service-cert" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.697157 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"loki-operator-manager-config" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.734434 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct"] Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.791009 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/14739c62-fc32-41a5-be6d-3f6673c6a231-manager-config\") pod \"loki-operator-controller-manager-5f5b48f4dc-5fjct\" (UID: \"14739c62-fc32-41a5-be6d-3f6673c6a231\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.791085 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/14739c62-fc32-41a5-be6d-3f6673c6a231-apiservice-cert\") pod 
\"loki-operator-controller-manager-5f5b48f4dc-5fjct\" (UID: \"14739c62-fc32-41a5-be6d-3f6673c6a231\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.791153 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/14739c62-fc32-41a5-be6d-3f6673c6a231-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-5f5b48f4dc-5fjct\" (UID: \"14739c62-fc32-41a5-be6d-3f6673c6a231\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.791179 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/14739c62-fc32-41a5-be6d-3f6673c6a231-webhook-cert\") pod \"loki-operator-controller-manager-5f5b48f4dc-5fjct\" (UID: \"14739c62-fc32-41a5-be6d-3f6673c6a231\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.791199 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bsg5k\" (UniqueName: \"kubernetes.io/projected/14739c62-fc32-41a5-be6d-3f6673c6a231-kube-api-access-bsg5k\") pod \"loki-operator-controller-manager-5f5b48f4dc-5fjct\" (UID: \"14739c62-fc32-41a5-be6d-3f6673c6a231\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.892820 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/14739c62-fc32-41a5-be6d-3f6673c6a231-apiservice-cert\") pod \"loki-operator-controller-manager-5f5b48f4dc-5fjct\" (UID: \"14739c62-fc32-41a5-be6d-3f6673c6a231\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.892976 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/14739c62-fc32-41a5-be6d-3f6673c6a231-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-5f5b48f4dc-5fjct\" (UID: \"14739c62-fc32-41a5-be6d-3f6673c6a231\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.893017 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/14739c62-fc32-41a5-be6d-3f6673c6a231-webhook-cert\") pod \"loki-operator-controller-manager-5f5b48f4dc-5fjct\" (UID: \"14739c62-fc32-41a5-be6d-3f6673c6a231\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.893046 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bsg5k\" (UniqueName: \"kubernetes.io/projected/14739c62-fc32-41a5-be6d-3f6673c6a231-kube-api-access-bsg5k\") pod \"loki-operator-controller-manager-5f5b48f4dc-5fjct\" (UID: \"14739c62-fc32-41a5-be6d-3f6673c6a231\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.893098 4946 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/14739c62-fc32-41a5-be6d-3f6673c6a231-manager-config\") pod \"loki-operator-controller-manager-5f5b48f4dc-5fjct\" (UID: \"14739c62-fc32-41a5-be6d-3f6673c6a231\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.894371 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/14739c62-fc32-41a5-be6d-3f6673c6a231-manager-config\") pod \"loki-operator-controller-manager-5f5b48f4dc-5fjct\" (UID: \"14739c62-fc32-41a5-be6d-3f6673c6a231\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.903316 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/14739c62-fc32-41a5-be6d-3f6673c6a231-apiservice-cert\") pod \"loki-operator-controller-manager-5f5b48f4dc-5fjct\" (UID: \"14739c62-fc32-41a5-be6d-3f6673c6a231\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.908911 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/14739c62-fc32-41a5-be6d-3f6673c6a231-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-5f5b48f4dc-5fjct\" (UID: \"14739c62-fc32-41a5-be6d-3f6673c6a231\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.912817 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/14739c62-fc32-41a5-be6d-3f6673c6a231-webhook-cert\") pod \"loki-operator-controller-manager-5f5b48f4dc-5fjct\" (UID: \"14739c62-fc32-41a5-be6d-3f6673c6a231\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.927443 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bsg5k\" (UniqueName: \"kubernetes.io/projected/14739c62-fc32-41a5-be6d-3f6673c6a231-kube-api-access-bsg5k\") pod \"loki-operator-controller-manager-5f5b48f4dc-5fjct\" (UID: \"14739c62-fc32-41a5-be6d-3f6673c6a231\") " pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:24 crc kubenswrapper[4946]: I1204 15:16:24.998641 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:25 crc kubenswrapper[4946]: I1204 15:16:25.228325 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-qrndj" podUID="5846224f-c43c-4c15-b5aa-596e19535a4c" containerName="registry-server" probeResult="failure" output=< Dec 04 15:16:25 crc kubenswrapper[4946]: timeout: failed to connect service ":50051" within 1s Dec 04 15:16:25 crc kubenswrapper[4946]: > Dec 04 15:16:25 crc kubenswrapper[4946]: I1204 15:16:25.539502 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct"] Dec 04 15:16:25 crc kubenswrapper[4946]: I1204 15:16:25.985540 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" event={"ID":"14739c62-fc32-41a5-be6d-3f6673c6a231","Type":"ContainerStarted","Data":"7bad2e05c8eeff0991790d296519fa25acfd86dd2ee10b74ee334acefcfabd0b"} Dec 04 15:16:34 crc kubenswrapper[4946]: I1204 15:16:34.041580 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" event={"ID":"14739c62-fc32-41a5-be6d-3f6673c6a231","Type":"ContainerStarted","Data":"29228ad244908904df3f5c5c6f3baa17f7d47629301a0829bcd19d29403ff5ff"} Dec 04 15:16:34 crc kubenswrapper[4946]: I1204 15:16:34.235101 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qrndj" Dec 04 15:16:34 crc kubenswrapper[4946]: I1204 15:16:34.281600 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qrndj" Dec 04 15:16:36 crc kubenswrapper[4946]: I1204 15:16:36.627065 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qrndj"] Dec 04 15:16:36 crc kubenswrapper[4946]: I1204 15:16:36.627854 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qrndj" podUID="5846224f-c43c-4c15-b5aa-596e19535a4c" containerName="registry-server" containerID="cri-o://bbe000437c84fe84557ce6862eb115ed856dcc6822c38fc66a907160d33c2dd0" gracePeriod=2 Dec 04 15:16:37 crc kubenswrapper[4946]: I1204 15:16:37.073684 4946 generic.go:334] "Generic (PLEG): container finished" podID="5846224f-c43c-4c15-b5aa-596e19535a4c" containerID="bbe000437c84fe84557ce6862eb115ed856dcc6822c38fc66a907160d33c2dd0" exitCode=0 Dec 04 15:16:37 crc kubenswrapper[4946]: I1204 15:16:37.073730 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrndj" event={"ID":"5846224f-c43c-4c15-b5aa-596e19535a4c","Type":"ContainerDied","Data":"bbe000437c84fe84557ce6862eb115ed856dcc6822c38fc66a907160d33c2dd0"} Dec 04 15:16:41 crc kubenswrapper[4946]: I1204 15:16:41.111753 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qrndj" event={"ID":"5846224f-c43c-4c15-b5aa-596e19535a4c","Type":"ContainerDied","Data":"7f67c9d5f07f4e679976048c46747c4100c7b10a8626a6ee2ab9dbb76cb872bd"} Dec 04 15:16:41 crc kubenswrapper[4946]: I1204 15:16:41.113424 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f67c9d5f07f4e679976048c46747c4100c7b10a8626a6ee2ab9dbb76cb872bd" Dec 04 15:16:41 crc kubenswrapper[4946]: I1204 15:16:41.149240 4946 util.go:48] "No ready sandbox 
for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qrndj" Dec 04 15:16:41 crc kubenswrapper[4946]: I1204 15:16:41.257504 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5846224f-c43c-4c15-b5aa-596e19535a4c-catalog-content\") pod \"5846224f-c43c-4c15-b5aa-596e19535a4c\" (UID: \"5846224f-c43c-4c15-b5aa-596e19535a4c\") " Dec 04 15:16:41 crc kubenswrapper[4946]: I1204 15:16:41.257579 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5846224f-c43c-4c15-b5aa-596e19535a4c-utilities\") pod \"5846224f-c43c-4c15-b5aa-596e19535a4c\" (UID: \"5846224f-c43c-4c15-b5aa-596e19535a4c\") " Dec 04 15:16:41 crc kubenswrapper[4946]: I1204 15:16:41.257640 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvh2j\" (UniqueName: \"kubernetes.io/projected/5846224f-c43c-4c15-b5aa-596e19535a4c-kube-api-access-rvh2j\") pod \"5846224f-c43c-4c15-b5aa-596e19535a4c\" (UID: \"5846224f-c43c-4c15-b5aa-596e19535a4c\") " Dec 04 15:16:41 crc kubenswrapper[4946]: I1204 15:16:41.258919 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5846224f-c43c-4c15-b5aa-596e19535a4c-utilities" (OuterVolumeSpecName: "utilities") pod "5846224f-c43c-4c15-b5aa-596e19535a4c" (UID: "5846224f-c43c-4c15-b5aa-596e19535a4c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:16:41 crc kubenswrapper[4946]: I1204 15:16:41.259465 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5846224f-c43c-4c15-b5aa-596e19535a4c-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:16:41 crc kubenswrapper[4946]: I1204 15:16:41.265652 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5846224f-c43c-4c15-b5aa-596e19535a4c-kube-api-access-rvh2j" (OuterVolumeSpecName: "kube-api-access-rvh2j") pod "5846224f-c43c-4c15-b5aa-596e19535a4c" (UID: "5846224f-c43c-4c15-b5aa-596e19535a4c"). InnerVolumeSpecName "kube-api-access-rvh2j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:16:41 crc kubenswrapper[4946]: I1204 15:16:41.360493 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvh2j\" (UniqueName: \"kubernetes.io/projected/5846224f-c43c-4c15-b5aa-596e19535a4c-kube-api-access-rvh2j\") on node \"crc\" DevicePath \"\"" Dec 04 15:16:41 crc kubenswrapper[4946]: I1204 15:16:41.374624 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5846224f-c43c-4c15-b5aa-596e19535a4c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5846224f-c43c-4c15-b5aa-596e19535a4c" (UID: "5846224f-c43c-4c15-b5aa-596e19535a4c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:16:41 crc kubenswrapper[4946]: I1204 15:16:41.461862 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5846224f-c43c-4c15-b5aa-596e19535a4c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:16:42 crc kubenswrapper[4946]: I1204 15:16:42.120367 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qrndj" Dec 04 15:16:42 crc kubenswrapper[4946]: I1204 15:16:42.120368 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" event={"ID":"14739c62-fc32-41a5-be6d-3f6673c6a231","Type":"ContainerStarted","Data":"34b5469e00ad28b6e9e32021b3d13135f4c4457a47318118a79b6439804e37a4"} Dec 04 15:16:42 crc kubenswrapper[4946]: I1204 15:16:42.121017 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:42 crc kubenswrapper[4946]: I1204 15:16:42.123852 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" Dec 04 15:16:42 crc kubenswrapper[4946]: I1204 15:16:42.150134 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators-redhat/loki-operator-controller-manager-5f5b48f4dc-5fjct" podStartSLOduration=2.403794982 podStartE2EDuration="18.150088595s" podCreationTimestamp="2025-12-04 15:16:24 +0000 UTC" firstStartedPulling="2025-12-04 15:16:25.555293937 +0000 UTC m=+836.441337578" lastFinishedPulling="2025-12-04 15:16:41.30158755 +0000 UTC m=+852.187631191" observedRunningTime="2025-12-04 15:16:42.146211211 +0000 UTC m=+853.032254852" watchObservedRunningTime="2025-12-04 15:16:42.150088595 +0000 UTC m=+853.036132236" Dec 04 15:16:42 crc kubenswrapper[4946]: I1204 15:16:42.192046 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qrndj"] Dec 04 15:16:42 crc kubenswrapper[4946]: I1204 15:16:42.215788 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qrndj"] Dec 04 15:16:43 crc kubenswrapper[4946]: I1204 15:16:43.461461 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5846224f-c43c-4c15-b5aa-596e19535a4c" path="/var/lib/kubelet/pods/5846224f-c43c-4c15-b5aa-596e19535a4c/volumes" Dec 04 15:17:11 crc kubenswrapper[4946]: I1204 15:17:11.658267 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh"] Dec 04 15:17:11 crc kubenswrapper[4946]: E1204 15:17:11.659213 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5846224f-c43c-4c15-b5aa-596e19535a4c" containerName="registry-server" Dec 04 15:17:11 crc kubenswrapper[4946]: I1204 15:17:11.659228 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="5846224f-c43c-4c15-b5aa-596e19535a4c" containerName="registry-server" Dec 04 15:17:11 crc kubenswrapper[4946]: E1204 15:17:11.659246 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5846224f-c43c-4c15-b5aa-596e19535a4c" containerName="extract-content" Dec 04 15:17:11 crc kubenswrapper[4946]: I1204 15:17:11.659253 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="5846224f-c43c-4c15-b5aa-596e19535a4c" containerName="extract-content" Dec 04 15:17:11 crc kubenswrapper[4946]: E1204 15:17:11.659298 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5846224f-c43c-4c15-b5aa-596e19535a4c" containerName="extract-utilities" Dec 04 15:17:11 crc kubenswrapper[4946]: I1204 15:17:11.659307 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="5846224f-c43c-4c15-b5aa-596e19535a4c" containerName="extract-utilities" Dec 04 15:17:11 crc 
kubenswrapper[4946]: I1204 15:17:11.659482 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="5846224f-c43c-4c15-b5aa-596e19535a4c" containerName="registry-server" Dec 04 15:17:11 crc kubenswrapper[4946]: I1204 15:17:11.660826 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh" Dec 04 15:17:11 crc kubenswrapper[4946]: I1204 15:17:11.663750 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 04 15:17:11 crc kubenswrapper[4946]: I1204 15:17:11.672845 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh"] Dec 04 15:17:11 crc kubenswrapper[4946]: I1204 15:17:11.733004 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/68203e19-3c15-4d99-a709-a7338b2f0dbc-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh\" (UID: \"68203e19-3c15-4d99-a709-a7338b2f0dbc\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh" Dec 04 15:17:11 crc kubenswrapper[4946]: I1204 15:17:11.733075 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/68203e19-3c15-4d99-a709-a7338b2f0dbc-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh\" (UID: \"68203e19-3c15-4d99-a709-a7338b2f0dbc\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh" Dec 04 15:17:11 crc kubenswrapper[4946]: I1204 15:17:11.733289 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h999h\" (UniqueName: \"kubernetes.io/projected/68203e19-3c15-4d99-a709-a7338b2f0dbc-kube-api-access-h999h\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh\" (UID: \"68203e19-3c15-4d99-a709-a7338b2f0dbc\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh" Dec 04 15:17:11 crc kubenswrapper[4946]: I1204 15:17:11.834818 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/68203e19-3c15-4d99-a709-a7338b2f0dbc-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh\" (UID: \"68203e19-3c15-4d99-a709-a7338b2f0dbc\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh" Dec 04 15:17:11 crc kubenswrapper[4946]: I1204 15:17:11.834889 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/68203e19-3c15-4d99-a709-a7338b2f0dbc-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh\" (UID: \"68203e19-3c15-4d99-a709-a7338b2f0dbc\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh" Dec 04 15:17:11 crc kubenswrapper[4946]: I1204 15:17:11.834935 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h999h\" (UniqueName: \"kubernetes.io/projected/68203e19-3c15-4d99-a709-a7338b2f0dbc-kube-api-access-h999h\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh\" (UID: \"68203e19-3c15-4d99-a709-a7338b2f0dbc\") " 
pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh" Dec 04 15:17:11 crc kubenswrapper[4946]: I1204 15:17:11.835668 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/68203e19-3c15-4d99-a709-a7338b2f0dbc-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh\" (UID: \"68203e19-3c15-4d99-a709-a7338b2f0dbc\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh" Dec 04 15:17:11 crc kubenswrapper[4946]: I1204 15:17:11.835715 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/68203e19-3c15-4d99-a709-a7338b2f0dbc-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh\" (UID: \"68203e19-3c15-4d99-a709-a7338b2f0dbc\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh" Dec 04 15:17:11 crc kubenswrapper[4946]: I1204 15:17:11.877922 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h999h\" (UniqueName: \"kubernetes.io/projected/68203e19-3c15-4d99-a709-a7338b2f0dbc-kube-api-access-h999h\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh\" (UID: \"68203e19-3c15-4d99-a709-a7338b2f0dbc\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh" Dec 04 15:17:11 crc kubenswrapper[4946]: I1204 15:17:11.978378 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh" Dec 04 15:17:12 crc kubenswrapper[4946]: I1204 15:17:12.472809 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh"] Dec 04 15:17:13 crc kubenswrapper[4946]: I1204 15:17:13.335221 4946 generic.go:334] "Generic (PLEG): container finished" podID="68203e19-3c15-4d99-a709-a7338b2f0dbc" containerID="916b2285e9904cbb758c488b0e4b4bf067865976e3237a7d125f455aae372b23" exitCode=0 Dec 04 15:17:13 crc kubenswrapper[4946]: I1204 15:17:13.335276 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh" event={"ID":"68203e19-3c15-4d99-a709-a7338b2f0dbc","Type":"ContainerDied","Data":"916b2285e9904cbb758c488b0e4b4bf067865976e3237a7d125f455aae372b23"} Dec 04 15:17:13 crc kubenswrapper[4946]: I1204 15:17:13.335510 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh" event={"ID":"68203e19-3c15-4d99-a709-a7338b2f0dbc","Type":"ContainerStarted","Data":"bc7360bff79f2f70f65ecd843acffc0300b4d9885d881c4f40ca0b01279269a0"} Dec 04 15:17:16 crc kubenswrapper[4946]: I1204 15:17:16.372018 4946 generic.go:334] "Generic (PLEG): container finished" podID="68203e19-3c15-4d99-a709-a7338b2f0dbc" containerID="402c60a78a6b7bf4328fe357ebe9e23792006f4a60fd6dff1f1883307c3b181a" exitCode=0 Dec 04 15:17:16 crc kubenswrapper[4946]: I1204 15:17:16.372102 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh" event={"ID":"68203e19-3c15-4d99-a709-a7338b2f0dbc","Type":"ContainerDied","Data":"402c60a78a6b7bf4328fe357ebe9e23792006f4a60fd6dff1f1883307c3b181a"} Dec 04 15:17:17 crc kubenswrapper[4946]: I1204 
15:17:17.381377 4946 generic.go:334] "Generic (PLEG): container finished" podID="68203e19-3c15-4d99-a709-a7338b2f0dbc" containerID="239f1f0057e1308f82f494f9ebbb3bcc8b37fbce11806693a7dbb0026826962c" exitCode=0 Dec 04 15:17:17 crc kubenswrapper[4946]: I1204 15:17:17.381430 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh" event={"ID":"68203e19-3c15-4d99-a709-a7338b2f0dbc","Type":"ContainerDied","Data":"239f1f0057e1308f82f494f9ebbb3bcc8b37fbce11806693a7dbb0026826962c"} Dec 04 15:17:18 crc kubenswrapper[4946]: I1204 15:17:18.645166 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh" Dec 04 15:17:18 crc kubenswrapper[4946]: I1204 15:17:18.835227 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/68203e19-3c15-4d99-a709-a7338b2f0dbc-util\") pod \"68203e19-3c15-4d99-a709-a7338b2f0dbc\" (UID: \"68203e19-3c15-4d99-a709-a7338b2f0dbc\") " Dec 04 15:17:18 crc kubenswrapper[4946]: I1204 15:17:18.835341 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h999h\" (UniqueName: \"kubernetes.io/projected/68203e19-3c15-4d99-a709-a7338b2f0dbc-kube-api-access-h999h\") pod \"68203e19-3c15-4d99-a709-a7338b2f0dbc\" (UID: \"68203e19-3c15-4d99-a709-a7338b2f0dbc\") " Dec 04 15:17:18 crc kubenswrapper[4946]: I1204 15:17:18.835447 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/68203e19-3c15-4d99-a709-a7338b2f0dbc-bundle\") pod \"68203e19-3c15-4d99-a709-a7338b2f0dbc\" (UID: \"68203e19-3c15-4d99-a709-a7338b2f0dbc\") " Dec 04 15:17:18 crc kubenswrapper[4946]: I1204 15:17:18.836243 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68203e19-3c15-4d99-a709-a7338b2f0dbc-bundle" (OuterVolumeSpecName: "bundle") pod "68203e19-3c15-4d99-a709-a7338b2f0dbc" (UID: "68203e19-3c15-4d99-a709-a7338b2f0dbc"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:17:18 crc kubenswrapper[4946]: I1204 15:17:18.841856 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68203e19-3c15-4d99-a709-a7338b2f0dbc-kube-api-access-h999h" (OuterVolumeSpecName: "kube-api-access-h999h") pod "68203e19-3c15-4d99-a709-a7338b2f0dbc" (UID: "68203e19-3c15-4d99-a709-a7338b2f0dbc"). InnerVolumeSpecName "kube-api-access-h999h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:17:18 crc kubenswrapper[4946]: I1204 15:17:18.851950 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68203e19-3c15-4d99-a709-a7338b2f0dbc-util" (OuterVolumeSpecName: "util") pod "68203e19-3c15-4d99-a709-a7338b2f0dbc" (UID: "68203e19-3c15-4d99-a709-a7338b2f0dbc"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:17:18 crc kubenswrapper[4946]: I1204 15:17:18.937288 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h999h\" (UniqueName: \"kubernetes.io/projected/68203e19-3c15-4d99-a709-a7338b2f0dbc-kube-api-access-h999h\") on node \"crc\" DevicePath \"\"" Dec 04 15:17:18 crc kubenswrapper[4946]: I1204 15:17:18.937343 4946 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/68203e19-3c15-4d99-a709-a7338b2f0dbc-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:17:18 crc kubenswrapper[4946]: I1204 15:17:18.937354 4946 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/68203e19-3c15-4d99-a709-a7338b2f0dbc-util\") on node \"crc\" DevicePath \"\"" Dec 04 15:17:19 crc kubenswrapper[4946]: I1204 15:17:19.435152 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh" event={"ID":"68203e19-3c15-4d99-a709-a7338b2f0dbc","Type":"ContainerDied","Data":"bc7360bff79f2f70f65ecd843acffc0300b4d9885d881c4f40ca0b01279269a0"} Dec 04 15:17:19 crc kubenswrapper[4946]: I1204 15:17:19.435200 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc7360bff79f2f70f65ecd843acffc0300b4d9885d881c4f40ca0b01279269a0" Dec 04 15:17:19 crc kubenswrapper[4946]: I1204 15:17:19.435299 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh" Dec 04 15:17:21 crc kubenswrapper[4946]: I1204 15:17:21.084590 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-x8ccj"] Dec 04 15:17:21 crc kubenswrapper[4946]: E1204 15:17:21.085423 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68203e19-3c15-4d99-a709-a7338b2f0dbc" containerName="util" Dec 04 15:17:21 crc kubenswrapper[4946]: I1204 15:17:21.085461 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="68203e19-3c15-4d99-a709-a7338b2f0dbc" containerName="util" Dec 04 15:17:21 crc kubenswrapper[4946]: E1204 15:17:21.085473 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68203e19-3c15-4d99-a709-a7338b2f0dbc" containerName="pull" Dec 04 15:17:21 crc kubenswrapper[4946]: I1204 15:17:21.085479 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="68203e19-3c15-4d99-a709-a7338b2f0dbc" containerName="pull" Dec 04 15:17:21 crc kubenswrapper[4946]: E1204 15:17:21.085487 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68203e19-3c15-4d99-a709-a7338b2f0dbc" containerName="extract" Dec 04 15:17:21 crc kubenswrapper[4946]: I1204 15:17:21.085494 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="68203e19-3c15-4d99-a709-a7338b2f0dbc" containerName="extract" Dec 04 15:17:21 crc kubenswrapper[4946]: I1204 15:17:21.085640 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="68203e19-3c15-4d99-a709-a7338b2f0dbc" containerName="extract" Dec 04 15:17:21 crc kubenswrapper[4946]: I1204 15:17:21.086312 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-x8ccj" Dec 04 15:17:21 crc kubenswrapper[4946]: I1204 15:17:21.088793 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 04 15:17:21 crc kubenswrapper[4946]: I1204 15:17:21.089071 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-gxt5v" Dec 04 15:17:21 crc kubenswrapper[4946]: I1204 15:17:21.093085 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 04 15:17:21 crc kubenswrapper[4946]: I1204 15:17:21.102628 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-x8ccj"] Dec 04 15:17:21 crc kubenswrapper[4946]: I1204 15:17:21.244553 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5ncj\" (UniqueName: \"kubernetes.io/projected/05dcb49b-4fdf-4fdb-b619-fc7649bb203d-kube-api-access-b5ncj\") pod \"nmstate-operator-5b5b58f5c8-x8ccj\" (UID: \"05dcb49b-4fdf-4fdb-b619-fc7649bb203d\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-x8ccj" Dec 04 15:17:21 crc kubenswrapper[4946]: I1204 15:17:21.346035 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5ncj\" (UniqueName: \"kubernetes.io/projected/05dcb49b-4fdf-4fdb-b619-fc7649bb203d-kube-api-access-b5ncj\") pod \"nmstate-operator-5b5b58f5c8-x8ccj\" (UID: \"05dcb49b-4fdf-4fdb-b619-fc7649bb203d\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-x8ccj" Dec 04 15:17:21 crc kubenswrapper[4946]: I1204 15:17:21.369915 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5ncj\" (UniqueName: \"kubernetes.io/projected/05dcb49b-4fdf-4fdb-b619-fc7649bb203d-kube-api-access-b5ncj\") pod \"nmstate-operator-5b5b58f5c8-x8ccj\" (UID: \"05dcb49b-4fdf-4fdb-b619-fc7649bb203d\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-x8ccj" Dec 04 15:17:21 crc kubenswrapper[4946]: I1204 15:17:21.417548 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-x8ccj" Dec 04 15:17:21 crc kubenswrapper[4946]: I1204 15:17:21.701614 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-x8ccj"] Dec 04 15:17:22 crc kubenswrapper[4946]: I1204 15:17:22.467463 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-x8ccj" event={"ID":"05dcb49b-4fdf-4fdb-b619-fc7649bb203d","Type":"ContainerStarted","Data":"6b697e85a8fb3c14f238483fed4f1eaa8993c6b48c4cbf732a8613cce32c08d6"} Dec 04 15:17:24 crc kubenswrapper[4946]: I1204 15:17:24.512789 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-x8ccj" event={"ID":"05dcb49b-4fdf-4fdb-b619-fc7649bb203d","Type":"ContainerStarted","Data":"ddf82b205d6fce8bb5432ce64364d2b6bf2d4ca6cc49b7aef51c11f292dfa985"} Dec 04 15:17:24 crc kubenswrapper[4946]: I1204 15:17:24.542668 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-x8ccj" podStartSLOduration=1.145044784 podStartE2EDuration="3.542646165s" podCreationTimestamp="2025-12-04 15:17:21 +0000 UTC" firstStartedPulling="2025-12-04 15:17:21.713014441 +0000 UTC m=+892.599058082" lastFinishedPulling="2025-12-04 15:17:24.110615822 +0000 UTC m=+894.996659463" observedRunningTime="2025-12-04 15:17:24.536683054 +0000 UTC m=+895.422726695" watchObservedRunningTime="2025-12-04 15:17:24.542646165 +0000 UTC m=+895.428689816" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.554488 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-bxmt7"] Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.555753 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-bxmt7" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.560549 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-hgscn" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.565519 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-bxmt7"] Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.572443 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-cb89w"] Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.573356 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-cb89w" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.574866 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.607090 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-b6z2h"] Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.607865 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-b6z2h" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.647328 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-cb89w"] Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.717873 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-ms9gw"] Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.719096 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-ms9gw" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.721380 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.721419 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.721886 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-jct57" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.730140 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/5ae26fa8-9751-40d0-b327-45011a9ec579-nmstate-lock\") pod \"nmstate-handler-b6z2h\" (UID: \"5ae26fa8-9751-40d0-b327-45011a9ec579\") " pod="openshift-nmstate/nmstate-handler-b6z2h" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.730212 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/5ae26fa8-9751-40d0-b327-45011a9ec579-dbus-socket\") pod \"nmstate-handler-b6z2h\" (UID: \"5ae26fa8-9751-40d0-b327-45011a9ec579\") " pod="openshift-nmstate/nmstate-handler-b6z2h" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.730237 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcn28\" (UniqueName: \"kubernetes.io/projected/4f22e89a-c84f-4f88-8718-2d3c7238324a-kube-api-access-bcn28\") pod \"nmstate-webhook-5f6d4c5ccb-cb89w\" (UID: \"4f22e89a-c84f-4f88-8718-2d3c7238324a\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-cb89w" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.730257 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/4f22e89a-c84f-4f88-8718-2d3c7238324a-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-cb89w\" (UID: \"4f22e89a-c84f-4f88-8718-2d3c7238324a\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-cb89w" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.730288 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/5ae26fa8-9751-40d0-b327-45011a9ec579-ovs-socket\") pod \"nmstate-handler-b6z2h\" (UID: \"5ae26fa8-9751-40d0-b327-45011a9ec579\") " pod="openshift-nmstate/nmstate-handler-b6z2h" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.730306 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrfcm\" (UniqueName: \"kubernetes.io/projected/70f33645-a744-4196-a5d0-e577c90023d5-kube-api-access-mrfcm\") pod \"nmstate-metrics-7f946cbc9-bxmt7\" (UID: 
\"70f33645-a744-4196-a5d0-e577c90023d5\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-bxmt7" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.730435 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5v9l6\" (UniqueName: \"kubernetes.io/projected/5ae26fa8-9751-40d0-b327-45011a9ec579-kube-api-access-5v9l6\") pod \"nmstate-handler-b6z2h\" (UID: \"5ae26fa8-9751-40d0-b327-45011a9ec579\") " pod="openshift-nmstate/nmstate-handler-b6z2h" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.733679 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-ms9gw"] Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.831269 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/cc926dea-6324-4350-bf4c-6f4142b2547b-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-ms9gw\" (UID: \"cc926dea-6324-4350-bf4c-6f4142b2547b\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-ms9gw" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.831331 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5v9l6\" (UniqueName: \"kubernetes.io/projected/5ae26fa8-9751-40d0-b327-45011a9ec579-kube-api-access-5v9l6\") pod \"nmstate-handler-b6z2h\" (UID: \"5ae26fa8-9751-40d0-b327-45011a9ec579\") " pod="openshift-nmstate/nmstate-handler-b6z2h" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.831389 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lx5q2\" (UniqueName: \"kubernetes.io/projected/cc926dea-6324-4350-bf4c-6f4142b2547b-kube-api-access-lx5q2\") pod \"nmstate-console-plugin-7fbb5f6569-ms9gw\" (UID: \"cc926dea-6324-4350-bf4c-6f4142b2547b\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-ms9gw" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.831451 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/5ae26fa8-9751-40d0-b327-45011a9ec579-nmstate-lock\") pod \"nmstate-handler-b6z2h\" (UID: \"5ae26fa8-9751-40d0-b327-45011a9ec579\") " pod="openshift-nmstate/nmstate-handler-b6z2h" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.831497 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/cc926dea-6324-4350-bf4c-6f4142b2547b-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-ms9gw\" (UID: \"cc926dea-6324-4350-bf4c-6f4142b2547b\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-ms9gw" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.831555 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/5ae26fa8-9751-40d0-b327-45011a9ec579-dbus-socket\") pod \"nmstate-handler-b6z2h\" (UID: \"5ae26fa8-9751-40d0-b327-45011a9ec579\") " pod="openshift-nmstate/nmstate-handler-b6z2h" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.831582 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcn28\" (UniqueName: \"kubernetes.io/projected/4f22e89a-c84f-4f88-8718-2d3c7238324a-kube-api-access-bcn28\") pod \"nmstate-webhook-5f6d4c5ccb-cb89w\" (UID: \"4f22e89a-c84f-4f88-8718-2d3c7238324a\") " 
pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-cb89w" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.831605 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/4f22e89a-c84f-4f88-8718-2d3c7238324a-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-cb89w\" (UID: \"4f22e89a-c84f-4f88-8718-2d3c7238324a\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-cb89w" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.831636 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/5ae26fa8-9751-40d0-b327-45011a9ec579-ovs-socket\") pod \"nmstate-handler-b6z2h\" (UID: \"5ae26fa8-9751-40d0-b327-45011a9ec579\") " pod="openshift-nmstate/nmstate-handler-b6z2h" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.831660 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrfcm\" (UniqueName: \"kubernetes.io/projected/70f33645-a744-4196-a5d0-e577c90023d5-kube-api-access-mrfcm\") pod \"nmstate-metrics-7f946cbc9-bxmt7\" (UID: \"70f33645-a744-4196-a5d0-e577c90023d5\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-bxmt7" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.831975 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/5ae26fa8-9751-40d0-b327-45011a9ec579-nmstate-lock\") pod \"nmstate-handler-b6z2h\" (UID: \"5ae26fa8-9751-40d0-b327-45011a9ec579\") " pod="openshift-nmstate/nmstate-handler-b6z2h" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.832083 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/5ae26fa8-9751-40d0-b327-45011a9ec579-ovs-socket\") pod \"nmstate-handler-b6z2h\" (UID: \"5ae26fa8-9751-40d0-b327-45011a9ec579\") " pod="openshift-nmstate/nmstate-handler-b6z2h" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.832307 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/5ae26fa8-9751-40d0-b327-45011a9ec579-dbus-socket\") pod \"nmstate-handler-b6z2h\" (UID: \"5ae26fa8-9751-40d0-b327-45011a9ec579\") " pod="openshift-nmstate/nmstate-handler-b6z2h" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.844830 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/4f22e89a-c84f-4f88-8718-2d3c7238324a-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-cb89w\" (UID: \"4f22e89a-c84f-4f88-8718-2d3c7238324a\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-cb89w" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.849518 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5v9l6\" (UniqueName: \"kubernetes.io/projected/5ae26fa8-9751-40d0-b327-45011a9ec579-kube-api-access-5v9l6\") pod \"nmstate-handler-b6z2h\" (UID: \"5ae26fa8-9751-40d0-b327-45011a9ec579\") " pod="openshift-nmstate/nmstate-handler-b6z2h" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.856708 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrfcm\" (UniqueName: \"kubernetes.io/projected/70f33645-a744-4196-a5d0-e577c90023d5-kube-api-access-mrfcm\") pod \"nmstate-metrics-7f946cbc9-bxmt7\" (UID: \"70f33645-a744-4196-a5d0-e577c90023d5\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-bxmt7" Dec 04 
15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.863277 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcn28\" (UniqueName: \"kubernetes.io/projected/4f22e89a-c84f-4f88-8718-2d3c7238324a-kube-api-access-bcn28\") pod \"nmstate-webhook-5f6d4c5ccb-cb89w\" (UID: \"4f22e89a-c84f-4f88-8718-2d3c7238324a\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-cb89w" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.933448 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/cc926dea-6324-4350-bf4c-6f4142b2547b-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-ms9gw\" (UID: \"cc926dea-6324-4350-bf4c-6f4142b2547b\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-ms9gw" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.933522 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lx5q2\" (UniqueName: \"kubernetes.io/projected/cc926dea-6324-4350-bf4c-6f4142b2547b-kube-api-access-lx5q2\") pod \"nmstate-console-plugin-7fbb5f6569-ms9gw\" (UID: \"cc926dea-6324-4350-bf4c-6f4142b2547b\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-ms9gw" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.933580 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/cc926dea-6324-4350-bf4c-6f4142b2547b-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-ms9gw\" (UID: \"cc926dea-6324-4350-bf4c-6f4142b2547b\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-ms9gw" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.934700 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/cc926dea-6324-4350-bf4c-6f4142b2547b-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-ms9gw\" (UID: \"cc926dea-6324-4350-bf4c-6f4142b2547b\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-ms9gw" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.938826 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/cc926dea-6324-4350-bf4c-6f4142b2547b-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-ms9gw\" (UID: \"cc926dea-6324-4350-bf4c-6f4142b2547b\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-ms9gw" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.942706 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-bxmt7" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.954063 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-cb89w" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.964638 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-b6z2h" Dec 04 15:17:25 crc kubenswrapper[4946]: I1204 15:17:25.982652 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lx5q2\" (UniqueName: \"kubernetes.io/projected/cc926dea-6324-4350-bf4c-6f4142b2547b-kube-api-access-lx5q2\") pod \"nmstate-console-plugin-7fbb5f6569-ms9gw\" (UID: \"cc926dea-6324-4350-bf4c-6f4142b2547b\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-ms9gw" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.025156 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-6dfd4c55cf-4gpqh"] Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.036659 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.039880 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-ms9gw" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.111234 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6dfd4c55cf-4gpqh"] Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.139655 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9cb4\" (UniqueName: \"kubernetes.io/projected/fbfda3c4-f013-4075-9ad0-879052b31d17-kube-api-access-x9cb4\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.139758 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fbfda3c4-f013-4075-9ad0-879052b31d17-console-serving-cert\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.139791 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fbfda3c4-f013-4075-9ad0-879052b31d17-console-oauth-config\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.139818 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fbfda3c4-f013-4075-9ad0-879052b31d17-service-ca\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.139839 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fbfda3c4-f013-4075-9ad0-879052b31d17-console-config\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.139859 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/fbfda3c4-f013-4075-9ad0-879052b31d17-trusted-ca-bundle\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.139877 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fbfda3c4-f013-4075-9ad0-879052b31d17-oauth-serving-cert\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.245201 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fbfda3c4-f013-4075-9ad0-879052b31d17-service-ca\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.245631 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fbfda3c4-f013-4075-9ad0-879052b31d17-console-config\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.245663 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fbfda3c4-f013-4075-9ad0-879052b31d17-trusted-ca-bundle\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.245697 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fbfda3c4-f013-4075-9ad0-879052b31d17-oauth-serving-cert\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.245730 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9cb4\" (UniqueName: \"kubernetes.io/projected/fbfda3c4-f013-4075-9ad0-879052b31d17-kube-api-access-x9cb4\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.245792 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fbfda3c4-f013-4075-9ad0-879052b31d17-console-serving-cert\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.245836 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fbfda3c4-f013-4075-9ad0-879052b31d17-console-oauth-config\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.247911 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" 
(UniqueName: \"kubernetes.io/configmap/fbfda3c4-f013-4075-9ad0-879052b31d17-service-ca\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.248207 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fbfda3c4-f013-4075-9ad0-879052b31d17-oauth-serving-cert\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.248745 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fbfda3c4-f013-4075-9ad0-879052b31d17-console-config\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.248979 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fbfda3c4-f013-4075-9ad0-879052b31d17-trusted-ca-bundle\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.259959 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fbfda3c4-f013-4075-9ad0-879052b31d17-console-serving-cert\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.260140 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fbfda3c4-f013-4075-9ad0-879052b31d17-console-oauth-config\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.268263 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9cb4\" (UniqueName: \"kubernetes.io/projected/fbfda3c4-f013-4075-9ad0-879052b31d17-kube-api-access-x9cb4\") pod \"console-6dfd4c55cf-4gpqh\" (UID: \"fbfda3c4-f013-4075-9ad0-879052b31d17\") " pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.358709 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-bxmt7"] Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.396431 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.494105 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-ms9gw"] Dec 04 15:17:26 crc kubenswrapper[4946]: W1204 15:17:26.510660 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc926dea_6324_4350_bf4c_6f4142b2547b.slice/crio-dafac989790be8e419ba78f688873686b99671bea5a598af11b3d6dafa10c4a8 WatchSource:0}: Error finding container dafac989790be8e419ba78f688873686b99671bea5a598af11b3d6dafa10c4a8: Status 404 returned error can't find the container with id dafac989790be8e419ba78f688873686b99671bea5a598af11b3d6dafa10c4a8 Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.525267 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-ms9gw" event={"ID":"cc926dea-6324-4350-bf4c-6f4142b2547b","Type":"ContainerStarted","Data":"dafac989790be8e419ba78f688873686b99671bea5a598af11b3d6dafa10c4a8"} Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.526785 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-b6z2h" event={"ID":"5ae26fa8-9751-40d0-b327-45011a9ec579","Type":"ContainerStarted","Data":"b7da2053ccd05f947bd9da752d30c736cffb973ad1f5a765a9f42912dc1bf9ba"} Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.528856 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-bxmt7" event={"ID":"70f33645-a744-4196-a5d0-e577c90023d5","Type":"ContainerStarted","Data":"8d861b98660f3c89dcdc0b94b0c6479b1bf64cac89db9f894ea8bb622dabfcb1"} Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.621665 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6dfd4c55cf-4gpqh"] Dec 04 15:17:26 crc kubenswrapper[4946]: I1204 15:17:26.660053 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-cb89w"] Dec 04 15:17:26 crc kubenswrapper[4946]: W1204 15:17:26.661084 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f22e89a_c84f_4f88_8718_2d3c7238324a.slice/crio-c9c2264437a0313ccd4701b057d5dd0df9b33cf7ee9f9e1051cf4433eba00639 WatchSource:0}: Error finding container c9c2264437a0313ccd4701b057d5dd0df9b33cf7ee9f9e1051cf4433eba00639: Status 404 returned error can't find the container with id c9c2264437a0313ccd4701b057d5dd0df9b33cf7ee9f9e1051cf4433eba00639 Dec 04 15:17:27 crc kubenswrapper[4946]: I1204 15:17:27.537242 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6dfd4c55cf-4gpqh" event={"ID":"fbfda3c4-f013-4075-9ad0-879052b31d17","Type":"ContainerStarted","Data":"8d5423f4e3a9c8cf6d062b2d8b28884b3dcd069ee99b116ff630e4afc17363e2"} Dec 04 15:17:27 crc kubenswrapper[4946]: I1204 15:17:27.538091 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6dfd4c55cf-4gpqh" event={"ID":"fbfda3c4-f013-4075-9ad0-879052b31d17","Type":"ContainerStarted","Data":"f085113b636cdfe73e325be870083b14cca63af70ad9a688fe36374945d0c78a"} Dec 04 15:17:27 crc kubenswrapper[4946]: I1204 15:17:27.539288 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-cb89w" 
event={"ID":"4f22e89a-c84f-4f88-8718-2d3c7238324a","Type":"ContainerStarted","Data":"c9c2264437a0313ccd4701b057d5dd0df9b33cf7ee9f9e1051cf4433eba00639"} Dec 04 15:17:29 crc kubenswrapper[4946]: I1204 15:17:29.484133 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-6dfd4c55cf-4gpqh" podStartSLOduration=4.484081915 podStartE2EDuration="4.484081915s" podCreationTimestamp="2025-12-04 15:17:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:17:27.56310269 +0000 UTC m=+898.449146331" watchObservedRunningTime="2025-12-04 15:17:29.484081915 +0000 UTC m=+900.370125556" Dec 04 15:17:31 crc kubenswrapper[4946]: I1204 15:17:31.574746 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-bxmt7" event={"ID":"70f33645-a744-4196-a5d0-e577c90023d5","Type":"ContainerStarted","Data":"2481f89e10f144af9cd09cecfd3dd5a364180feca0d9978a28ee2b184f09faa7"} Dec 04 15:17:31 crc kubenswrapper[4946]: I1204 15:17:31.577219 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-b6z2h" event={"ID":"5ae26fa8-9751-40d0-b327-45011a9ec579","Type":"ContainerStarted","Data":"e9b79398f606c7d6a5ace85504b4b34aa2a9d13edf52e7db2c4a0c6ad568469c"} Dec 04 15:17:31 crc kubenswrapper[4946]: I1204 15:17:31.577362 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-b6z2h" Dec 04 15:17:31 crc kubenswrapper[4946]: I1204 15:17:31.579974 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-cb89w" event={"ID":"4f22e89a-c84f-4f88-8718-2d3c7238324a","Type":"ContainerStarted","Data":"94ffdf8a771e2713bed223d026c6eec8685aaf5e82375e8cd059ea5f2b59fcfa"} Dec 04 15:17:31 crc kubenswrapper[4946]: I1204 15:17:31.580138 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-cb89w" Dec 04 15:17:31 crc kubenswrapper[4946]: I1204 15:17:31.581713 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-ms9gw" event={"ID":"cc926dea-6324-4350-bf4c-6f4142b2547b","Type":"ContainerStarted","Data":"e3feaf9f0d8b37f193958e608a2cb9ae8bde46f10ab451be4043f2ca6ea20f11"} Dec 04 15:17:31 crc kubenswrapper[4946]: I1204 15:17:31.599405 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-b6z2h" podStartSLOduration=1.787906613 podStartE2EDuration="6.599377146s" podCreationTimestamp="2025-12-04 15:17:25 +0000 UTC" firstStartedPulling="2025-12-04 15:17:26.095195854 +0000 UTC m=+896.981239495" lastFinishedPulling="2025-12-04 15:17:30.906666387 +0000 UTC m=+901.792710028" observedRunningTime="2025-12-04 15:17:31.592260344 +0000 UTC m=+902.478303985" watchObservedRunningTime="2025-12-04 15:17:31.599377146 +0000 UTC m=+902.485420787" Dec 04 15:17:31 crc kubenswrapper[4946]: I1204 15:17:31.616453 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-cb89w" podStartSLOduration=2.373721457 podStartE2EDuration="6.616422076s" podCreationTimestamp="2025-12-04 15:17:25 +0000 UTC" firstStartedPulling="2025-12-04 15:17:26.663918827 +0000 UTC m=+897.549962468" lastFinishedPulling="2025-12-04 15:17:30.906619446 +0000 UTC m=+901.792663087" observedRunningTime="2025-12-04 15:17:31.615775529 +0000 UTC 
m=+902.501819170" watchObservedRunningTime="2025-12-04 15:17:31.616422076 +0000 UTC m=+902.502465737" Dec 04 15:17:31 crc kubenswrapper[4946]: I1204 15:17:31.639723 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-ms9gw" podStartSLOduration=2.246430611 podStartE2EDuration="6.639696835s" podCreationTimestamp="2025-12-04 15:17:25 +0000 UTC" firstStartedPulling="2025-12-04 15:17:26.514172314 +0000 UTC m=+897.400215955" lastFinishedPulling="2025-12-04 15:17:30.907438538 +0000 UTC m=+901.793482179" observedRunningTime="2025-12-04 15:17:31.633315192 +0000 UTC m=+902.519358833" watchObservedRunningTime="2025-12-04 15:17:31.639696835 +0000 UTC m=+902.525740486" Dec 04 15:17:33 crc kubenswrapper[4946]: I1204 15:17:33.596905 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-bxmt7" event={"ID":"70f33645-a744-4196-a5d0-e577c90023d5","Type":"ContainerStarted","Data":"a3902d8b23787f243254821823998dbae62988940c36e9bac5b356425f4a908b"} Dec 04 15:17:36 crc kubenswrapper[4946]: I1204 15:17:36.397173 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:36 crc kubenswrapper[4946]: I1204 15:17:36.398257 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:36 crc kubenswrapper[4946]: I1204 15:17:36.402490 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:36 crc kubenswrapper[4946]: I1204 15:17:36.432108 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-bxmt7" podStartSLOduration=4.475583197 podStartE2EDuration="11.432076632s" podCreationTimestamp="2025-12-04 15:17:25 +0000 UTC" firstStartedPulling="2025-12-04 15:17:26.368489212 +0000 UTC m=+897.254532853" lastFinishedPulling="2025-12-04 15:17:33.324982647 +0000 UTC m=+904.211026288" observedRunningTime="2025-12-04 15:17:33.624673087 +0000 UTC m=+904.510716738" watchObservedRunningTime="2025-12-04 15:17:36.432076632 +0000 UTC m=+907.318120293" Dec 04 15:17:36 crc kubenswrapper[4946]: I1204 15:17:36.630841 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-6dfd4c55cf-4gpqh" Dec 04 15:17:36 crc kubenswrapper[4946]: I1204 15:17:36.690573 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-v4qw8"] Dec 04 15:17:40 crc kubenswrapper[4946]: I1204 15:17:40.991264 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-b6z2h" Dec 04 15:17:45 crc kubenswrapper[4946]: I1204 15:17:45.240426 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lxtbh"] Dec 04 15:17:45 crc kubenswrapper[4946]: I1204 15:17:45.245335 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lxtbh" Dec 04 15:17:45 crc kubenswrapper[4946]: I1204 15:17:45.261703 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lxtbh"] Dec 04 15:17:45 crc kubenswrapper[4946]: I1204 15:17:45.399639 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eff15647-166a-494d-bbd2-a706b86cab0e-catalog-content\") pod \"community-operators-lxtbh\" (UID: \"eff15647-166a-494d-bbd2-a706b86cab0e\") " pod="openshift-marketplace/community-operators-lxtbh" Dec 04 15:17:45 crc kubenswrapper[4946]: I1204 15:17:45.399804 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfs4s\" (UniqueName: \"kubernetes.io/projected/eff15647-166a-494d-bbd2-a706b86cab0e-kube-api-access-hfs4s\") pod \"community-operators-lxtbh\" (UID: \"eff15647-166a-494d-bbd2-a706b86cab0e\") " pod="openshift-marketplace/community-operators-lxtbh" Dec 04 15:17:45 crc kubenswrapper[4946]: I1204 15:17:45.399855 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eff15647-166a-494d-bbd2-a706b86cab0e-utilities\") pod \"community-operators-lxtbh\" (UID: \"eff15647-166a-494d-bbd2-a706b86cab0e\") " pod="openshift-marketplace/community-operators-lxtbh" Dec 04 15:17:45 crc kubenswrapper[4946]: I1204 15:17:45.500666 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfs4s\" (UniqueName: \"kubernetes.io/projected/eff15647-166a-494d-bbd2-a706b86cab0e-kube-api-access-hfs4s\") pod \"community-operators-lxtbh\" (UID: \"eff15647-166a-494d-bbd2-a706b86cab0e\") " pod="openshift-marketplace/community-operators-lxtbh" Dec 04 15:17:45 crc kubenswrapper[4946]: I1204 15:17:45.500751 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eff15647-166a-494d-bbd2-a706b86cab0e-utilities\") pod \"community-operators-lxtbh\" (UID: \"eff15647-166a-494d-bbd2-a706b86cab0e\") " pod="openshift-marketplace/community-operators-lxtbh" Dec 04 15:17:45 crc kubenswrapper[4946]: I1204 15:17:45.500806 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eff15647-166a-494d-bbd2-a706b86cab0e-catalog-content\") pod \"community-operators-lxtbh\" (UID: \"eff15647-166a-494d-bbd2-a706b86cab0e\") " pod="openshift-marketplace/community-operators-lxtbh" Dec 04 15:17:45 crc kubenswrapper[4946]: I1204 15:17:45.501514 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eff15647-166a-494d-bbd2-a706b86cab0e-catalog-content\") pod \"community-operators-lxtbh\" (UID: \"eff15647-166a-494d-bbd2-a706b86cab0e\") " pod="openshift-marketplace/community-operators-lxtbh" Dec 04 15:17:45 crc kubenswrapper[4946]: I1204 15:17:45.501587 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eff15647-166a-494d-bbd2-a706b86cab0e-utilities\") pod \"community-operators-lxtbh\" (UID: \"eff15647-166a-494d-bbd2-a706b86cab0e\") " pod="openshift-marketplace/community-operators-lxtbh" Dec 04 15:17:45 crc kubenswrapper[4946]: I1204 15:17:45.524316 4946 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hfs4s\" (UniqueName: \"kubernetes.io/projected/eff15647-166a-494d-bbd2-a706b86cab0e-kube-api-access-hfs4s\") pod \"community-operators-lxtbh\" (UID: \"eff15647-166a-494d-bbd2-a706b86cab0e\") " pod="openshift-marketplace/community-operators-lxtbh" Dec 04 15:17:45 crc kubenswrapper[4946]: I1204 15:17:45.572981 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lxtbh" Dec 04 15:17:45 crc kubenswrapper[4946]: I1204 15:17:45.947595 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lxtbh"] Dec 04 15:17:45 crc kubenswrapper[4946]: I1204 15:17:45.960012 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-cb89w" Dec 04 15:17:45 crc kubenswrapper[4946]: W1204 15:17:45.969931 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeff15647_166a_494d_bbd2_a706b86cab0e.slice/crio-95ef619a5761694f6d480d30a2a8b305dcd73439422a97bd1a18f6714362f0b4 WatchSource:0}: Error finding container 95ef619a5761694f6d480d30a2a8b305dcd73439422a97bd1a18f6714362f0b4: Status 404 returned error can't find the container with id 95ef619a5761694f6d480d30a2a8b305dcd73439422a97bd1a18f6714362f0b4 Dec 04 15:17:46 crc kubenswrapper[4946]: I1204 15:17:46.688904 4946 generic.go:334] "Generic (PLEG): container finished" podID="eff15647-166a-494d-bbd2-a706b86cab0e" containerID="a576e3400f76572eb48d4267bc19c8bc7270d55d7755feb6c602a76743784d58" exitCode=0 Dec 04 15:17:46 crc kubenswrapper[4946]: I1204 15:17:46.689048 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lxtbh" event={"ID":"eff15647-166a-494d-bbd2-a706b86cab0e","Type":"ContainerDied","Data":"a576e3400f76572eb48d4267bc19c8bc7270d55d7755feb6c602a76743784d58"} Dec 04 15:17:46 crc kubenswrapper[4946]: I1204 15:17:46.689295 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lxtbh" event={"ID":"eff15647-166a-494d-bbd2-a706b86cab0e","Type":"ContainerStarted","Data":"95ef619a5761694f6d480d30a2a8b305dcd73439422a97bd1a18f6714362f0b4"} Dec 04 15:17:47 crc kubenswrapper[4946]: I1204 15:17:47.699341 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lxtbh" event={"ID":"eff15647-166a-494d-bbd2-a706b86cab0e","Type":"ContainerStarted","Data":"2cb66467dbb39688939b51a326e3034c803ba8f96d7a7f38ca3cabd442142c7a"} Dec 04 15:17:48 crc kubenswrapper[4946]: I1204 15:17:48.706693 4946 generic.go:334] "Generic (PLEG): container finished" podID="eff15647-166a-494d-bbd2-a706b86cab0e" containerID="2cb66467dbb39688939b51a326e3034c803ba8f96d7a7f38ca3cabd442142c7a" exitCode=0 Dec 04 15:17:48 crc kubenswrapper[4946]: I1204 15:17:48.706739 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lxtbh" event={"ID":"eff15647-166a-494d-bbd2-a706b86cab0e","Type":"ContainerDied","Data":"2cb66467dbb39688939b51a326e3034c803ba8f96d7a7f38ca3cabd442142c7a"} Dec 04 15:17:49 crc kubenswrapper[4946]: I1204 15:17:49.715533 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lxtbh" event={"ID":"eff15647-166a-494d-bbd2-a706b86cab0e","Type":"ContainerStarted","Data":"3fe66651f5d60607a11beb3cabbc8419445314d73aa443da38dc3c1d351a8c47"} Dec 04 15:17:49 crc 
kubenswrapper[4946]: I1204 15:17:49.738884 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lxtbh" podStartSLOduration=2.2621728660000002 podStartE2EDuration="4.738860939s" podCreationTimestamp="2025-12-04 15:17:45 +0000 UTC" firstStartedPulling="2025-12-04 15:17:46.690878466 +0000 UTC m=+917.576922097" lastFinishedPulling="2025-12-04 15:17:49.167566529 +0000 UTC m=+920.053610170" observedRunningTime="2025-12-04 15:17:49.734499781 +0000 UTC m=+920.620543432" watchObservedRunningTime="2025-12-04 15:17:49.738860939 +0000 UTC m=+920.624904580" Dec 04 15:17:55 crc kubenswrapper[4946]: I1204 15:17:55.574165 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lxtbh" Dec 04 15:17:55 crc kubenswrapper[4946]: I1204 15:17:55.574628 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lxtbh" Dec 04 15:17:55 crc kubenswrapper[4946]: I1204 15:17:55.622447 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lxtbh" Dec 04 15:17:55 crc kubenswrapper[4946]: I1204 15:17:55.798935 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lxtbh" Dec 04 15:17:55 crc kubenswrapper[4946]: I1204 15:17:55.863601 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lxtbh"] Dec 04 15:17:57 crc kubenswrapper[4946]: I1204 15:17:57.775323 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lxtbh" podUID="eff15647-166a-494d-bbd2-a706b86cab0e" containerName="registry-server" containerID="cri-o://3fe66651f5d60607a11beb3cabbc8419445314d73aa443da38dc3c1d351a8c47" gracePeriod=2 Dec 04 15:17:58 crc kubenswrapper[4946]: I1204 15:17:58.786101 4946 generic.go:334] "Generic (PLEG): container finished" podID="eff15647-166a-494d-bbd2-a706b86cab0e" containerID="3fe66651f5d60607a11beb3cabbc8419445314d73aa443da38dc3c1d351a8c47" exitCode=0 Dec 04 15:17:58 crc kubenswrapper[4946]: I1204 15:17:58.786253 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lxtbh" event={"ID":"eff15647-166a-494d-bbd2-a706b86cab0e","Type":"ContainerDied","Data":"3fe66651f5d60607a11beb3cabbc8419445314d73aa443da38dc3c1d351a8c47"} Dec 04 15:18:00 crc kubenswrapper[4946]: I1204 15:18:00.170488 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lxtbh" Dec 04 15:18:00 crc kubenswrapper[4946]: I1204 15:18:00.360840 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eff15647-166a-494d-bbd2-a706b86cab0e-utilities\") pod \"eff15647-166a-494d-bbd2-a706b86cab0e\" (UID: \"eff15647-166a-494d-bbd2-a706b86cab0e\") " Dec 04 15:18:00 crc kubenswrapper[4946]: I1204 15:18:00.360990 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hfs4s\" (UniqueName: \"kubernetes.io/projected/eff15647-166a-494d-bbd2-a706b86cab0e-kube-api-access-hfs4s\") pod \"eff15647-166a-494d-bbd2-a706b86cab0e\" (UID: \"eff15647-166a-494d-bbd2-a706b86cab0e\") " Dec 04 15:18:00 crc kubenswrapper[4946]: I1204 15:18:00.361047 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eff15647-166a-494d-bbd2-a706b86cab0e-catalog-content\") pod \"eff15647-166a-494d-bbd2-a706b86cab0e\" (UID: \"eff15647-166a-494d-bbd2-a706b86cab0e\") " Dec 04 15:18:00 crc kubenswrapper[4946]: I1204 15:18:00.362304 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eff15647-166a-494d-bbd2-a706b86cab0e-utilities" (OuterVolumeSpecName: "utilities") pod "eff15647-166a-494d-bbd2-a706b86cab0e" (UID: "eff15647-166a-494d-bbd2-a706b86cab0e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:18:00 crc kubenswrapper[4946]: I1204 15:18:00.367970 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eff15647-166a-494d-bbd2-a706b86cab0e-kube-api-access-hfs4s" (OuterVolumeSpecName: "kube-api-access-hfs4s") pod "eff15647-166a-494d-bbd2-a706b86cab0e" (UID: "eff15647-166a-494d-bbd2-a706b86cab0e"). InnerVolumeSpecName "kube-api-access-hfs4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:18:00 crc kubenswrapper[4946]: I1204 15:18:00.411989 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eff15647-166a-494d-bbd2-a706b86cab0e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eff15647-166a-494d-bbd2-a706b86cab0e" (UID: "eff15647-166a-494d-bbd2-a706b86cab0e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:18:00 crc kubenswrapper[4946]: I1204 15:18:00.464224 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eff15647-166a-494d-bbd2-a706b86cab0e-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:00 crc kubenswrapper[4946]: I1204 15:18:00.464332 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hfs4s\" (UniqueName: \"kubernetes.io/projected/eff15647-166a-494d-bbd2-a706b86cab0e-kube-api-access-hfs4s\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:00 crc kubenswrapper[4946]: I1204 15:18:00.464350 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eff15647-166a-494d-bbd2-a706b86cab0e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:00 crc kubenswrapper[4946]: I1204 15:18:00.803869 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lxtbh" event={"ID":"eff15647-166a-494d-bbd2-a706b86cab0e","Type":"ContainerDied","Data":"95ef619a5761694f6d480d30a2a8b305dcd73439422a97bd1a18f6714362f0b4"} Dec 04 15:18:00 crc kubenswrapper[4946]: I1204 15:18:00.803953 4946 scope.go:117] "RemoveContainer" containerID="3fe66651f5d60607a11beb3cabbc8419445314d73aa443da38dc3c1d351a8c47" Dec 04 15:18:00 crc kubenswrapper[4946]: I1204 15:18:00.803951 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lxtbh" Dec 04 15:18:00 crc kubenswrapper[4946]: I1204 15:18:00.834428 4946 scope.go:117] "RemoveContainer" containerID="2cb66467dbb39688939b51a326e3034c803ba8f96d7a7f38ca3cabd442142c7a" Dec 04 15:18:00 crc kubenswrapper[4946]: I1204 15:18:00.838601 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lxtbh"] Dec 04 15:18:00 crc kubenswrapper[4946]: I1204 15:18:00.848200 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lxtbh"] Dec 04 15:18:00 crc kubenswrapper[4946]: I1204 15:18:00.867513 4946 scope.go:117] "RemoveContainer" containerID="a576e3400f76572eb48d4267bc19c8bc7270d55d7755feb6c602a76743784d58" Dec 04 15:18:01 crc kubenswrapper[4946]: I1204 15:18:01.462189 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eff15647-166a-494d-bbd2-a706b86cab0e" path="/var/lib/kubelet/pods/eff15647-166a-494d-bbd2-a706b86cab0e/volumes" Dec 04 15:18:01 crc kubenswrapper[4946]: I1204 15:18:01.739157 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-v4qw8" podUID="12dc3c7b-da6c-46a0-b0c9-d0899e46837a" containerName="console" containerID="cri-o://53d69ae3b42ec9359ec8343288ee6efa00e662745bd470dd92f27afbdeba1742" gracePeriod=15 Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.169771 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-v4qw8_12dc3c7b-da6c-46a0-b0c9-d0899e46837a/console/0.log" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.170364 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.193834 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-console-config\") pod \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.193914 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-service-ca\") pod \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.193957 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-trusted-ca-bundle\") pod \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.194004 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjvqx\" (UniqueName: \"kubernetes.io/projected/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-kube-api-access-rjvqx\") pod \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.195054 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-service-ca" (OuterVolumeSpecName: "service-ca") pod "12dc3c7b-da6c-46a0-b0c9-d0899e46837a" (UID: "12dc3c7b-da6c-46a0-b0c9-d0899e46837a"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.195084 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "12dc3c7b-da6c-46a0-b0c9-d0899e46837a" (UID: "12dc3c7b-da6c-46a0-b0c9-d0899e46837a"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.195198 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "12dc3c7b-da6c-46a0-b0c9-d0899e46837a" (UID: "12dc3c7b-da6c-46a0-b0c9-d0899e46837a"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.195297 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-oauth-serving-cert\") pod \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.195406 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-console-oauth-config\") pod \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.195459 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-console-serving-cert\") pod \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\" (UID: \"12dc3c7b-da6c-46a0-b0c9-d0899e46837a\") " Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.195540 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-console-config" (OuterVolumeSpecName: "console-config") pod "12dc3c7b-da6c-46a0-b0c9-d0899e46837a" (UID: "12dc3c7b-da6c-46a0-b0c9-d0899e46837a"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.195793 4946 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-console-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.195819 4946 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-service-ca\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.195831 4946 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.195843 4946 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.204875 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "12dc3c7b-da6c-46a0-b0c9-d0899e46837a" (UID: "12dc3c7b-da6c-46a0-b0c9-d0899e46837a"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.207297 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "12dc3c7b-da6c-46a0-b0c9-d0899e46837a" (UID: "12dc3c7b-da6c-46a0-b0c9-d0899e46837a"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.218667 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-kube-api-access-rjvqx" (OuterVolumeSpecName: "kube-api-access-rjvqx") pod "12dc3c7b-da6c-46a0-b0c9-d0899e46837a" (UID: "12dc3c7b-da6c-46a0-b0c9-d0899e46837a"). InnerVolumeSpecName "kube-api-access-rjvqx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.297280 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjvqx\" (UniqueName: \"kubernetes.io/projected/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-kube-api-access-rjvqx\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.297352 4946 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.297366 4946 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/12dc3c7b-da6c-46a0-b0c9-d0899e46837a-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.467377 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ns4kg"] Dec 04 15:18:02 crc kubenswrapper[4946]: E1204 15:18:02.467653 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12dc3c7b-da6c-46a0-b0c9-d0899e46837a" containerName="console" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.467667 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="12dc3c7b-da6c-46a0-b0c9-d0899e46837a" containerName="console" Dec 04 15:18:02 crc kubenswrapper[4946]: E1204 15:18:02.467686 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eff15647-166a-494d-bbd2-a706b86cab0e" containerName="registry-server" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.467693 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="eff15647-166a-494d-bbd2-a706b86cab0e" containerName="registry-server" Dec 04 15:18:02 crc kubenswrapper[4946]: E1204 15:18:02.467708 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eff15647-166a-494d-bbd2-a706b86cab0e" containerName="extract-content" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.467715 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="eff15647-166a-494d-bbd2-a706b86cab0e" containerName="extract-content" Dec 04 15:18:02 crc kubenswrapper[4946]: E1204 15:18:02.467723 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eff15647-166a-494d-bbd2-a706b86cab0e" containerName="extract-utilities" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.467730 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="eff15647-166a-494d-bbd2-a706b86cab0e" containerName="extract-utilities" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.467858 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="eff15647-166a-494d-bbd2-a706b86cab0e" containerName="registry-server" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.467882 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="12dc3c7b-da6c-46a0-b0c9-d0899e46837a" containerName="console" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 
15:18:02.469018 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ns4kg" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.489711 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ns4kg"] Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.499571 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cd27802-0a2d-410a-93a0-d979a81d92d4-utilities\") pod \"redhat-marketplace-ns4kg\" (UID: \"8cd27802-0a2d-410a-93a0-d979a81d92d4\") " pod="openshift-marketplace/redhat-marketplace-ns4kg" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.499648 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69khg\" (UniqueName: \"kubernetes.io/projected/8cd27802-0a2d-410a-93a0-d979a81d92d4-kube-api-access-69khg\") pod \"redhat-marketplace-ns4kg\" (UID: \"8cd27802-0a2d-410a-93a0-d979a81d92d4\") " pod="openshift-marketplace/redhat-marketplace-ns4kg" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.499725 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cd27802-0a2d-410a-93a0-d979a81d92d4-catalog-content\") pod \"redhat-marketplace-ns4kg\" (UID: \"8cd27802-0a2d-410a-93a0-d979a81d92d4\") " pod="openshift-marketplace/redhat-marketplace-ns4kg" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.600971 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cd27802-0a2d-410a-93a0-d979a81d92d4-catalog-content\") pod \"redhat-marketplace-ns4kg\" (UID: \"8cd27802-0a2d-410a-93a0-d979a81d92d4\") " pod="openshift-marketplace/redhat-marketplace-ns4kg" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.601052 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cd27802-0a2d-410a-93a0-d979a81d92d4-utilities\") pod \"redhat-marketplace-ns4kg\" (UID: \"8cd27802-0a2d-410a-93a0-d979a81d92d4\") " pod="openshift-marketplace/redhat-marketplace-ns4kg" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.601140 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69khg\" (UniqueName: \"kubernetes.io/projected/8cd27802-0a2d-410a-93a0-d979a81d92d4-kube-api-access-69khg\") pod \"redhat-marketplace-ns4kg\" (UID: \"8cd27802-0a2d-410a-93a0-d979a81d92d4\") " pod="openshift-marketplace/redhat-marketplace-ns4kg" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.601568 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cd27802-0a2d-410a-93a0-d979a81d92d4-catalog-content\") pod \"redhat-marketplace-ns4kg\" (UID: \"8cd27802-0a2d-410a-93a0-d979a81d92d4\") " pod="openshift-marketplace/redhat-marketplace-ns4kg" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.601802 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cd27802-0a2d-410a-93a0-d979a81d92d4-utilities\") pod \"redhat-marketplace-ns4kg\" (UID: \"8cd27802-0a2d-410a-93a0-d979a81d92d4\") " pod="openshift-marketplace/redhat-marketplace-ns4kg" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 
15:18:02.623454 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69khg\" (UniqueName: \"kubernetes.io/projected/8cd27802-0a2d-410a-93a0-d979a81d92d4-kube-api-access-69khg\") pod \"redhat-marketplace-ns4kg\" (UID: \"8cd27802-0a2d-410a-93a0-d979a81d92d4\") " pod="openshift-marketplace/redhat-marketplace-ns4kg" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.789335 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ns4kg" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.851483 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-v4qw8_12dc3c7b-da6c-46a0-b0c9-d0899e46837a/console/0.log" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.851993 4946 generic.go:334] "Generic (PLEG): container finished" podID="12dc3c7b-da6c-46a0-b0c9-d0899e46837a" containerID="53d69ae3b42ec9359ec8343288ee6efa00e662745bd470dd92f27afbdeba1742" exitCode=2 Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.852033 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-v4qw8" event={"ID":"12dc3c7b-da6c-46a0-b0c9-d0899e46837a","Type":"ContainerDied","Data":"53d69ae3b42ec9359ec8343288ee6efa00e662745bd470dd92f27afbdeba1742"} Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.852072 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-v4qw8" event={"ID":"12dc3c7b-da6c-46a0-b0c9-d0899e46837a","Type":"ContainerDied","Data":"e2d891f4735f62c1b4f5c9a46d1a05bd7bfc9419885389feaa5b027a95708a3c"} Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.852092 4946 scope.go:117] "RemoveContainer" containerID="53d69ae3b42ec9359ec8343288ee6efa00e662745bd470dd92f27afbdeba1742" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.852253 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-v4qw8" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.924687 4946 scope.go:117] "RemoveContainer" containerID="53d69ae3b42ec9359ec8343288ee6efa00e662745bd470dd92f27afbdeba1742" Dec 04 15:18:02 crc kubenswrapper[4946]: E1204 15:18:02.928332 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53d69ae3b42ec9359ec8343288ee6efa00e662745bd470dd92f27afbdeba1742\": container with ID starting with 53d69ae3b42ec9359ec8343288ee6efa00e662745bd470dd92f27afbdeba1742 not found: ID does not exist" containerID="53d69ae3b42ec9359ec8343288ee6efa00e662745bd470dd92f27afbdeba1742" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.928388 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53d69ae3b42ec9359ec8343288ee6efa00e662745bd470dd92f27afbdeba1742"} err="failed to get container status \"53d69ae3b42ec9359ec8343288ee6efa00e662745bd470dd92f27afbdeba1742\": rpc error: code = NotFound desc = could not find container \"53d69ae3b42ec9359ec8343288ee6efa00e662745bd470dd92f27afbdeba1742\": container with ID starting with 53d69ae3b42ec9359ec8343288ee6efa00e662745bd470dd92f27afbdeba1742 not found: ID does not exist" Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.946196 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-v4qw8"] Dec 04 15:18:02 crc kubenswrapper[4946]: I1204 15:18:02.952581 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-v4qw8"] Dec 04 15:18:03 crc kubenswrapper[4946]: I1204 15:18:03.228920 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ns4kg"] Dec 04 15:18:03 crc kubenswrapper[4946]: I1204 15:18:03.465709 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12dc3c7b-da6c-46a0-b0c9-d0899e46837a" path="/var/lib/kubelet/pods/12dc3c7b-da6c-46a0-b0c9-d0899e46837a/volumes" Dec 04 15:18:03 crc kubenswrapper[4946]: I1204 15:18:03.860012 4946 generic.go:334] "Generic (PLEG): container finished" podID="8cd27802-0a2d-410a-93a0-d979a81d92d4" containerID="c2b9a89674fc856e5834b30de1a34c340dc73e020664a4090f41e1b30646af76" exitCode=0 Dec 04 15:18:03 crc kubenswrapper[4946]: I1204 15:18:03.860152 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ns4kg" event={"ID":"8cd27802-0a2d-410a-93a0-d979a81d92d4","Type":"ContainerDied","Data":"c2b9a89674fc856e5834b30de1a34c340dc73e020664a4090f41e1b30646af76"} Dec 04 15:18:03 crc kubenswrapper[4946]: I1204 15:18:03.860271 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ns4kg" event={"ID":"8cd27802-0a2d-410a-93a0-d979a81d92d4","Type":"ContainerStarted","Data":"0b19119b015f1a241f82c7fdb477210a579680e3f790025d0e13e765811fac79"} Dec 04 15:18:04 crc kubenswrapper[4946]: I1204 15:18:04.056940 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv"] Dec 04 15:18:04 crc kubenswrapper[4946]: I1204 15:18:04.058989 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv" Dec 04 15:18:04 crc kubenswrapper[4946]: I1204 15:18:04.061390 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 04 15:18:04 crc kubenswrapper[4946]: I1204 15:18:04.075644 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv"] Dec 04 15:18:04 crc kubenswrapper[4946]: I1204 15:18:04.123729 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7349bd68-959a-4268-a194-f55f10061076-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv\" (UID: \"7349bd68-959a-4268-a194-f55f10061076\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv" Dec 04 15:18:04 crc kubenswrapper[4946]: I1204 15:18:04.123794 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85h4b\" (UniqueName: \"kubernetes.io/projected/7349bd68-959a-4268-a194-f55f10061076-kube-api-access-85h4b\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv\" (UID: \"7349bd68-959a-4268-a194-f55f10061076\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv" Dec 04 15:18:04 crc kubenswrapper[4946]: I1204 15:18:04.123832 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7349bd68-959a-4268-a194-f55f10061076-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv\" (UID: \"7349bd68-959a-4268-a194-f55f10061076\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv" Dec 04 15:18:04 crc kubenswrapper[4946]: I1204 15:18:04.225458 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85h4b\" (UniqueName: \"kubernetes.io/projected/7349bd68-959a-4268-a194-f55f10061076-kube-api-access-85h4b\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv\" (UID: \"7349bd68-959a-4268-a194-f55f10061076\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv" Dec 04 15:18:04 crc kubenswrapper[4946]: I1204 15:18:04.225512 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7349bd68-959a-4268-a194-f55f10061076-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv\" (UID: \"7349bd68-959a-4268-a194-f55f10061076\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv" Dec 04 15:18:04 crc kubenswrapper[4946]: I1204 15:18:04.225583 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7349bd68-959a-4268-a194-f55f10061076-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv\" (UID: \"7349bd68-959a-4268-a194-f55f10061076\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv" Dec 04 15:18:04 crc kubenswrapper[4946]: I1204 15:18:04.226218 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/7349bd68-959a-4268-a194-f55f10061076-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv\" (UID: \"7349bd68-959a-4268-a194-f55f10061076\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv" Dec 04 15:18:04 crc kubenswrapper[4946]: I1204 15:18:04.226218 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7349bd68-959a-4268-a194-f55f10061076-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv\" (UID: \"7349bd68-959a-4268-a194-f55f10061076\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv" Dec 04 15:18:04 crc kubenswrapper[4946]: I1204 15:18:04.252079 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85h4b\" (UniqueName: \"kubernetes.io/projected/7349bd68-959a-4268-a194-f55f10061076-kube-api-access-85h4b\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv\" (UID: \"7349bd68-959a-4268-a194-f55f10061076\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv" Dec 04 15:18:04 crc kubenswrapper[4946]: I1204 15:18:04.383024 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv" Dec 04 15:18:04 crc kubenswrapper[4946]: I1204 15:18:04.871870 4946 generic.go:334] "Generic (PLEG): container finished" podID="8cd27802-0a2d-410a-93a0-d979a81d92d4" containerID="ff74a0d8c7f8d5f39437d583e8e44cb7fa9e452ef5a943d3c092bdae2c0307a6" exitCode=0 Dec 04 15:18:04 crc kubenswrapper[4946]: I1204 15:18:04.871950 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ns4kg" event={"ID":"8cd27802-0a2d-410a-93a0-d979a81d92d4","Type":"ContainerDied","Data":"ff74a0d8c7f8d5f39437d583e8e44cb7fa9e452ef5a943d3c092bdae2c0307a6"} Dec 04 15:18:04 crc kubenswrapper[4946]: I1204 15:18:04.875316 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv"] Dec 04 15:18:04 crc kubenswrapper[4946]: W1204 15:18:04.894761 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7349bd68_959a_4268_a194_f55f10061076.slice/crio-2b88504139ae4ed836e680f2b1f252ee552337679cb73cfd549573e94c573300 WatchSource:0}: Error finding container 2b88504139ae4ed836e680f2b1f252ee552337679cb73cfd549573e94c573300: Status 404 returned error can't find the container with id 2b88504139ae4ed836e680f2b1f252ee552337679cb73cfd549573e94c573300 Dec 04 15:18:05 crc kubenswrapper[4946]: I1204 15:18:05.880366 4946 generic.go:334] "Generic (PLEG): container finished" podID="7349bd68-959a-4268-a194-f55f10061076" containerID="c215ab1eeccd2f03d168ca5b1ebfd1ed736e9e19b368738ed7ee86840286338e" exitCode=0 Dec 04 15:18:05 crc kubenswrapper[4946]: I1204 15:18:05.880473 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv" event={"ID":"7349bd68-959a-4268-a194-f55f10061076","Type":"ContainerDied","Data":"c215ab1eeccd2f03d168ca5b1ebfd1ed736e9e19b368738ed7ee86840286338e"} Dec 04 15:18:05 crc kubenswrapper[4946]: I1204 15:18:05.880793 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv" event={"ID":"7349bd68-959a-4268-a194-f55f10061076","Type":"ContainerStarted","Data":"2b88504139ae4ed836e680f2b1f252ee552337679cb73cfd549573e94c573300"} Dec 04 15:18:05 crc kubenswrapper[4946]: I1204 15:18:05.883673 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ns4kg" event={"ID":"8cd27802-0a2d-410a-93a0-d979a81d92d4","Type":"ContainerStarted","Data":"98a9bc730bd224c593b619af5cf2fa2f9bd4dce2a61058e122f24843d31163c3"} Dec 04 15:18:05 crc kubenswrapper[4946]: I1204 15:18:05.922318 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ns4kg" podStartSLOduration=2.431957395 podStartE2EDuration="3.922297382s" podCreationTimestamp="2025-12-04 15:18:02 +0000 UTC" firstStartedPulling="2025-12-04 15:18:03.862485634 +0000 UTC m=+934.748529275" lastFinishedPulling="2025-12-04 15:18:05.352825621 +0000 UTC m=+936.238869262" observedRunningTime="2025-12-04 15:18:05.91820033 +0000 UTC m=+936.804243991" watchObservedRunningTime="2025-12-04 15:18:05.922297382 +0000 UTC m=+936.808341033" Dec 04 15:18:10 crc kubenswrapper[4946]: I1204 15:18:10.225257 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-twc2v"] Dec 04 15:18:10 crc kubenswrapper[4946]: I1204 15:18:10.227052 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-twc2v" Dec 04 15:18:10 crc kubenswrapper[4946]: I1204 15:18:10.249012 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-twc2v"] Dec 04 15:18:10 crc kubenswrapper[4946]: I1204 15:18:10.315679 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455-catalog-content\") pod \"certified-operators-twc2v\" (UID: \"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455\") " pod="openshift-marketplace/certified-operators-twc2v" Dec 04 15:18:10 crc kubenswrapper[4946]: I1204 15:18:10.315744 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlvs2\" (UniqueName: \"kubernetes.io/projected/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455-kube-api-access-qlvs2\") pod \"certified-operators-twc2v\" (UID: \"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455\") " pod="openshift-marketplace/certified-operators-twc2v" Dec 04 15:18:10 crc kubenswrapper[4946]: I1204 15:18:10.315772 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455-utilities\") pod \"certified-operators-twc2v\" (UID: \"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455\") " pod="openshift-marketplace/certified-operators-twc2v" Dec 04 15:18:10 crc kubenswrapper[4946]: I1204 15:18:10.417220 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455-utilities\") pod \"certified-operators-twc2v\" (UID: \"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455\") " pod="openshift-marketplace/certified-operators-twc2v" Dec 04 15:18:10 crc kubenswrapper[4946]: I1204 15:18:10.417363 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455-catalog-content\") pod \"certified-operators-twc2v\" (UID: \"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455\") " pod="openshift-marketplace/certified-operators-twc2v" Dec 04 15:18:10 crc kubenswrapper[4946]: I1204 15:18:10.417397 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlvs2\" (UniqueName: \"kubernetes.io/projected/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455-kube-api-access-qlvs2\") pod \"certified-operators-twc2v\" (UID: \"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455\") " pod="openshift-marketplace/certified-operators-twc2v" Dec 04 15:18:10 crc kubenswrapper[4946]: I1204 15:18:10.418862 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455-catalog-content\") pod \"certified-operators-twc2v\" (UID: \"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455\") " pod="openshift-marketplace/certified-operators-twc2v" Dec 04 15:18:10 crc kubenswrapper[4946]: I1204 15:18:10.418872 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455-utilities\") pod \"certified-operators-twc2v\" (UID: \"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455\") " pod="openshift-marketplace/certified-operators-twc2v" Dec 04 15:18:10 crc kubenswrapper[4946]: I1204 15:18:10.443992 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlvs2\" (UniqueName: \"kubernetes.io/projected/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455-kube-api-access-qlvs2\") pod \"certified-operators-twc2v\" (UID: \"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455\") " pod="openshift-marketplace/certified-operators-twc2v" Dec 04 15:18:10 crc kubenswrapper[4946]: I1204 15:18:10.557420 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-twc2v" Dec 04 15:18:10 crc kubenswrapper[4946]: I1204 15:18:10.813620 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-twc2v"] Dec 04 15:18:10 crc kubenswrapper[4946]: I1204 15:18:10.915425 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-twc2v" event={"ID":"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455","Type":"ContainerStarted","Data":"5ce3f7f6826bdf0d4d891713a34bc27507e7697374554b8bb8c035715e16de2c"} Dec 04 15:18:12 crc kubenswrapper[4946]: I1204 15:18:12.789510 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ns4kg" Dec 04 15:18:12 crc kubenswrapper[4946]: I1204 15:18:12.789943 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ns4kg" Dec 04 15:18:12 crc kubenswrapper[4946]: I1204 15:18:12.838781 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ns4kg" Dec 04 15:18:12 crc kubenswrapper[4946]: I1204 15:18:12.928849 4946 generic.go:334] "Generic (PLEG): container finished" podID="35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455" containerID="bf1afc5c2457f9ad1b2e7a43e41074b7cf0af71845ea436c2b8d3b175de26929" exitCode=0 Dec 04 15:18:12 crc kubenswrapper[4946]: I1204 15:18:12.928908 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-twc2v" event={"ID":"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455","Type":"ContainerDied","Data":"bf1afc5c2457f9ad1b2e7a43e41074b7cf0af71845ea436c2b8d3b175de26929"} Dec 04 15:18:12 crc kubenswrapper[4946]: I1204 15:18:12.975304 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ns4kg" Dec 04 15:18:14 crc kubenswrapper[4946]: I1204 15:18:14.962930 4946 generic.go:334] "Generic (PLEG): container finished" podID="35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455" containerID="e21eb2b7b5ebb406f859a4263977c6d7c68b7d60a61f8d30971717e7fd102b3a" exitCode=0 Dec 04 15:18:14 crc kubenswrapper[4946]: I1204 15:18:14.963000 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-twc2v" event={"ID":"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455","Type":"ContainerDied","Data":"e21eb2b7b5ebb406f859a4263977c6d7c68b7d60a61f8d30971717e7fd102b3a"} Dec 04 15:18:16 crc kubenswrapper[4946]: I1204 15:18:16.412595 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ns4kg"] Dec 04 15:18:16 crc kubenswrapper[4946]: I1204 15:18:16.413409 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ns4kg" podUID="8cd27802-0a2d-410a-93a0-d979a81d92d4" containerName="registry-server" containerID="cri-o://98a9bc730bd224c593b619af5cf2fa2f9bd4dce2a61058e122f24843d31163c3" gracePeriod=2 Dec 04 15:18:16 crc kubenswrapper[4946]: I1204 15:18:16.875612 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ns4kg" Dec 04 15:18:16 crc kubenswrapper[4946]: I1204 15:18:16.979632 4946 generic.go:334] "Generic (PLEG): container finished" podID="8cd27802-0a2d-410a-93a0-d979a81d92d4" containerID="98a9bc730bd224c593b619af5cf2fa2f9bd4dce2a61058e122f24843d31163c3" exitCode=0 Dec 04 15:18:16 crc kubenswrapper[4946]: I1204 15:18:16.979701 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ns4kg" Dec 04 15:18:16 crc kubenswrapper[4946]: I1204 15:18:16.979752 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ns4kg" event={"ID":"8cd27802-0a2d-410a-93a0-d979a81d92d4","Type":"ContainerDied","Data":"98a9bc730bd224c593b619af5cf2fa2f9bd4dce2a61058e122f24843d31163c3"} Dec 04 15:18:16 crc kubenswrapper[4946]: I1204 15:18:16.979826 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ns4kg" event={"ID":"8cd27802-0a2d-410a-93a0-d979a81d92d4","Type":"ContainerDied","Data":"0b19119b015f1a241f82c7fdb477210a579680e3f790025d0e13e765811fac79"} Dec 04 15:18:16 crc kubenswrapper[4946]: I1204 15:18:16.979863 4946 scope.go:117] "RemoveContainer" containerID="98a9bc730bd224c593b619af5cf2fa2f9bd4dce2a61058e122f24843d31163c3" Dec 04 15:18:16 crc kubenswrapper[4946]: I1204 15:18:16.987597 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-twc2v" event={"ID":"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455","Type":"ContainerStarted","Data":"cd8cb06d872076ac8d37ccdd03e6c694fcf9053ccb797a95c8af40d4542fd15d"} Dec 04 15:18:16 crc kubenswrapper[4946]: I1204 15:18:16.992265 4946 generic.go:334] "Generic (PLEG): container finished" podID="7349bd68-959a-4268-a194-f55f10061076" containerID="79d8dcbf5edea2cd582097400ead396a5dbcfd70f8306c7d41512518797d2ab2" exitCode=0 Dec 04 15:18:16 crc kubenswrapper[4946]: I1204 15:18:16.992333 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv" event={"ID":"7349bd68-959a-4268-a194-f55f10061076","Type":"ContainerDied","Data":"79d8dcbf5edea2cd582097400ead396a5dbcfd70f8306c7d41512518797d2ab2"} Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.002746 4946 scope.go:117] "RemoveContainer" containerID="ff74a0d8c7f8d5f39437d583e8e44cb7fa9e452ef5a943d3c092bdae2c0307a6" Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.020290 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-twc2v" podStartSLOduration=3.983846898 podStartE2EDuration="7.020265148s" podCreationTimestamp="2025-12-04 15:18:10 +0000 UTC" firstStartedPulling="2025-12-04 15:18:12.930834381 +0000 UTC m=+943.816878022" lastFinishedPulling="2025-12-04 15:18:15.967252631 +0000 UTC m=+946.853296272" observedRunningTime="2025-12-04 15:18:17.016629419 +0000 UTC m=+947.902673080" watchObservedRunningTime="2025-12-04 15:18:17.020265148 +0000 UTC m=+947.906308799" Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.021956 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cd27802-0a2d-410a-93a0-d979a81d92d4-catalog-content\") pod \"8cd27802-0a2d-410a-93a0-d979a81d92d4\" (UID: \"8cd27802-0a2d-410a-93a0-d979a81d92d4\") " Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.022021 4946 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cd27802-0a2d-410a-93a0-d979a81d92d4-utilities\") pod \"8cd27802-0a2d-410a-93a0-d979a81d92d4\" (UID: \"8cd27802-0a2d-410a-93a0-d979a81d92d4\") " Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.022103 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69khg\" (UniqueName: \"kubernetes.io/projected/8cd27802-0a2d-410a-93a0-d979a81d92d4-kube-api-access-69khg\") pod \"8cd27802-0a2d-410a-93a0-d979a81d92d4\" (UID: \"8cd27802-0a2d-410a-93a0-d979a81d92d4\") " Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.024325 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cd27802-0a2d-410a-93a0-d979a81d92d4-utilities" (OuterVolumeSpecName: "utilities") pod "8cd27802-0a2d-410a-93a0-d979a81d92d4" (UID: "8cd27802-0a2d-410a-93a0-d979a81d92d4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.029039 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cd27802-0a2d-410a-93a0-d979a81d92d4-kube-api-access-69khg" (OuterVolumeSpecName: "kube-api-access-69khg") pod "8cd27802-0a2d-410a-93a0-d979a81d92d4" (UID: "8cd27802-0a2d-410a-93a0-d979a81d92d4"). InnerVolumeSpecName "kube-api-access-69khg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.041823 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cd27802-0a2d-410a-93a0-d979a81d92d4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8cd27802-0a2d-410a-93a0-d979a81d92d4" (UID: "8cd27802-0a2d-410a-93a0-d979a81d92d4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.059550 4946 scope.go:117] "RemoveContainer" containerID="c2b9a89674fc856e5834b30de1a34c340dc73e020664a4090f41e1b30646af76" Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.099508 4946 scope.go:117] "RemoveContainer" containerID="98a9bc730bd224c593b619af5cf2fa2f9bd4dce2a61058e122f24843d31163c3" Dec 04 15:18:17 crc kubenswrapper[4946]: E1204 15:18:17.100538 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98a9bc730bd224c593b619af5cf2fa2f9bd4dce2a61058e122f24843d31163c3\": container with ID starting with 98a9bc730bd224c593b619af5cf2fa2f9bd4dce2a61058e122f24843d31163c3 not found: ID does not exist" containerID="98a9bc730bd224c593b619af5cf2fa2f9bd4dce2a61058e122f24843d31163c3" Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.100567 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98a9bc730bd224c593b619af5cf2fa2f9bd4dce2a61058e122f24843d31163c3"} err="failed to get container status \"98a9bc730bd224c593b619af5cf2fa2f9bd4dce2a61058e122f24843d31163c3\": rpc error: code = NotFound desc = could not find container \"98a9bc730bd224c593b619af5cf2fa2f9bd4dce2a61058e122f24843d31163c3\": container with ID starting with 98a9bc730bd224c593b619af5cf2fa2f9bd4dce2a61058e122f24843d31163c3 not found: ID does not exist" Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.100590 4946 scope.go:117] "RemoveContainer" containerID="ff74a0d8c7f8d5f39437d583e8e44cb7fa9e452ef5a943d3c092bdae2c0307a6" Dec 04 15:18:17 crc kubenswrapper[4946]: E1204 15:18:17.101032 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff74a0d8c7f8d5f39437d583e8e44cb7fa9e452ef5a943d3c092bdae2c0307a6\": container with ID starting with ff74a0d8c7f8d5f39437d583e8e44cb7fa9e452ef5a943d3c092bdae2c0307a6 not found: ID does not exist" containerID="ff74a0d8c7f8d5f39437d583e8e44cb7fa9e452ef5a943d3c092bdae2c0307a6" Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.101056 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff74a0d8c7f8d5f39437d583e8e44cb7fa9e452ef5a943d3c092bdae2c0307a6"} err="failed to get container status \"ff74a0d8c7f8d5f39437d583e8e44cb7fa9e452ef5a943d3c092bdae2c0307a6\": rpc error: code = NotFound desc = could not find container \"ff74a0d8c7f8d5f39437d583e8e44cb7fa9e452ef5a943d3c092bdae2c0307a6\": container with ID starting with ff74a0d8c7f8d5f39437d583e8e44cb7fa9e452ef5a943d3c092bdae2c0307a6 not found: ID does not exist" Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.101069 4946 scope.go:117] "RemoveContainer" containerID="c2b9a89674fc856e5834b30de1a34c340dc73e020664a4090f41e1b30646af76" Dec 04 15:18:17 crc kubenswrapper[4946]: E1204 15:18:17.101351 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2b9a89674fc856e5834b30de1a34c340dc73e020664a4090f41e1b30646af76\": container with ID starting with c2b9a89674fc856e5834b30de1a34c340dc73e020664a4090f41e1b30646af76 not found: ID does not exist" containerID="c2b9a89674fc856e5834b30de1a34c340dc73e020664a4090f41e1b30646af76" Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.101372 4946 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"c2b9a89674fc856e5834b30de1a34c340dc73e020664a4090f41e1b30646af76"} err="failed to get container status \"c2b9a89674fc856e5834b30de1a34c340dc73e020664a4090f41e1b30646af76\": rpc error: code = NotFound desc = could not find container \"c2b9a89674fc856e5834b30de1a34c340dc73e020664a4090f41e1b30646af76\": container with ID starting with c2b9a89674fc856e5834b30de1a34c340dc73e020664a4090f41e1b30646af76 not found: ID does not exist" Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.124479 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cd27802-0a2d-410a-93a0-d979a81d92d4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.124509 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cd27802-0a2d-410a-93a0-d979a81d92d4-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.124521 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69khg\" (UniqueName: \"kubernetes.io/projected/8cd27802-0a2d-410a-93a0-d979a81d92d4-kube-api-access-69khg\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.312876 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ns4kg"] Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.319095 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ns4kg"] Dec 04 15:18:17 crc kubenswrapper[4946]: I1204 15:18:17.462625 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cd27802-0a2d-410a-93a0-d979a81d92d4" path="/var/lib/kubelet/pods/8cd27802-0a2d-410a-93a0-d979a81d92d4/volumes" Dec 04 15:18:18 crc kubenswrapper[4946]: I1204 15:18:18.003978 4946 generic.go:334] "Generic (PLEG): container finished" podID="7349bd68-959a-4268-a194-f55f10061076" containerID="68ddf6175a81de48db057d0c02367fafd3cd922d4043e76840657d48e39b882f" exitCode=0 Dec 04 15:18:18 crc kubenswrapper[4946]: I1204 15:18:18.004056 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv" event={"ID":"7349bd68-959a-4268-a194-f55f10061076","Type":"ContainerDied","Data":"68ddf6175a81de48db057d0c02367fafd3cd922d4043e76840657d48e39b882f"} Dec 04 15:18:19 crc kubenswrapper[4946]: I1204 15:18:19.267245 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv" Dec 04 15:18:19 crc kubenswrapper[4946]: I1204 15:18:19.365314 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7349bd68-959a-4268-a194-f55f10061076-bundle\") pod \"7349bd68-959a-4268-a194-f55f10061076\" (UID: \"7349bd68-959a-4268-a194-f55f10061076\") " Dec 04 15:18:19 crc kubenswrapper[4946]: I1204 15:18:19.365401 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7349bd68-959a-4268-a194-f55f10061076-util\") pod \"7349bd68-959a-4268-a194-f55f10061076\" (UID: \"7349bd68-959a-4268-a194-f55f10061076\") " Dec 04 15:18:19 crc kubenswrapper[4946]: I1204 15:18:19.365432 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85h4b\" (UniqueName: \"kubernetes.io/projected/7349bd68-959a-4268-a194-f55f10061076-kube-api-access-85h4b\") pod \"7349bd68-959a-4268-a194-f55f10061076\" (UID: \"7349bd68-959a-4268-a194-f55f10061076\") " Dec 04 15:18:19 crc kubenswrapper[4946]: I1204 15:18:19.366867 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7349bd68-959a-4268-a194-f55f10061076-bundle" (OuterVolumeSpecName: "bundle") pod "7349bd68-959a-4268-a194-f55f10061076" (UID: "7349bd68-959a-4268-a194-f55f10061076"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:18:19 crc kubenswrapper[4946]: I1204 15:18:19.376578 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7349bd68-959a-4268-a194-f55f10061076-kube-api-access-85h4b" (OuterVolumeSpecName: "kube-api-access-85h4b") pod "7349bd68-959a-4268-a194-f55f10061076" (UID: "7349bd68-959a-4268-a194-f55f10061076"). InnerVolumeSpecName "kube-api-access-85h4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:18:19 crc kubenswrapper[4946]: I1204 15:18:19.377736 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7349bd68-959a-4268-a194-f55f10061076-util" (OuterVolumeSpecName: "util") pod "7349bd68-959a-4268-a194-f55f10061076" (UID: "7349bd68-959a-4268-a194-f55f10061076"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:18:19 crc kubenswrapper[4946]: I1204 15:18:19.467629 4946 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7349bd68-959a-4268-a194-f55f10061076-util\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:19 crc kubenswrapper[4946]: I1204 15:18:19.467670 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85h4b\" (UniqueName: \"kubernetes.io/projected/7349bd68-959a-4268-a194-f55f10061076-kube-api-access-85h4b\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:19 crc kubenswrapper[4946]: I1204 15:18:19.467698 4946 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7349bd68-959a-4268-a194-f55f10061076-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:20 crc kubenswrapper[4946]: I1204 15:18:20.020546 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv" event={"ID":"7349bd68-959a-4268-a194-f55f10061076","Type":"ContainerDied","Data":"2b88504139ae4ed836e680f2b1f252ee552337679cb73cfd549573e94c573300"} Dec 04 15:18:20 crc kubenswrapper[4946]: I1204 15:18:20.020611 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b88504139ae4ed836e680f2b1f252ee552337679cb73cfd549573e94c573300" Dec 04 15:18:20 crc kubenswrapper[4946]: I1204 15:18:20.020623 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv" Dec 04 15:18:20 crc kubenswrapper[4946]: I1204 15:18:20.558529 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-twc2v" Dec 04 15:18:20 crc kubenswrapper[4946]: I1204 15:18:20.559516 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-twc2v" Dec 04 15:18:20 crc kubenswrapper[4946]: I1204 15:18:20.668154 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-twc2v" Dec 04 15:18:21 crc kubenswrapper[4946]: I1204 15:18:21.089648 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-twc2v" Dec 04 15:18:23 crc kubenswrapper[4946]: I1204 15:18:23.805520 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-twc2v"] Dec 04 15:18:24 crc kubenswrapper[4946]: I1204 15:18:24.052502 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-twc2v" podUID="35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455" containerName="registry-server" containerID="cri-o://cd8cb06d872076ac8d37ccdd03e6c694fcf9053ccb797a95c8af40d4542fd15d" gracePeriod=2 Dec 04 15:18:24 crc kubenswrapper[4946]: I1204 15:18:24.528522 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-twc2v" Dec 04 15:18:24 crc kubenswrapper[4946]: I1204 15:18:24.645207 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qlvs2\" (UniqueName: \"kubernetes.io/projected/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455-kube-api-access-qlvs2\") pod \"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455\" (UID: \"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455\") " Dec 04 15:18:24 crc kubenswrapper[4946]: I1204 15:18:24.645318 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455-utilities\") pod \"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455\" (UID: \"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455\") " Dec 04 15:18:24 crc kubenswrapper[4946]: I1204 15:18:24.645580 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455-catalog-content\") pod \"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455\" (UID: \"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455\") " Dec 04 15:18:24 crc kubenswrapper[4946]: I1204 15:18:24.646410 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455-utilities" (OuterVolumeSpecName: "utilities") pod "35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455" (UID: "35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:18:24 crc kubenswrapper[4946]: I1204 15:18:24.657305 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455-kube-api-access-qlvs2" (OuterVolumeSpecName: "kube-api-access-qlvs2") pod "35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455" (UID: "35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455"). InnerVolumeSpecName "kube-api-access-qlvs2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:18:24 crc kubenswrapper[4946]: I1204 15:18:24.698601 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455" (UID: "35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:18:24 crc kubenswrapper[4946]: I1204 15:18:24.746631 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:24 crc kubenswrapper[4946]: I1204 15:18:24.746673 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qlvs2\" (UniqueName: \"kubernetes.io/projected/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455-kube-api-access-qlvs2\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:24 crc kubenswrapper[4946]: I1204 15:18:24.746686 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.060962 4946 generic.go:334] "Generic (PLEG): container finished" podID="35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455" containerID="cd8cb06d872076ac8d37ccdd03e6c694fcf9053ccb797a95c8af40d4542fd15d" exitCode=0 Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.061036 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-twc2v" event={"ID":"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455","Type":"ContainerDied","Data":"cd8cb06d872076ac8d37ccdd03e6c694fcf9053ccb797a95c8af40d4542fd15d"} Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.061080 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-twc2v" event={"ID":"35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455","Type":"ContainerDied","Data":"5ce3f7f6826bdf0d4d891713a34bc27507e7697374554b8bb8c035715e16de2c"} Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.061102 4946 scope.go:117] "RemoveContainer" containerID="cd8cb06d872076ac8d37ccdd03e6c694fcf9053ccb797a95c8af40d4542fd15d" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.061342 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-twc2v" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.094672 4946 scope.go:117] "RemoveContainer" containerID="e21eb2b7b5ebb406f859a4263977c6d7c68b7d60a61f8d30971717e7fd102b3a" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.100605 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-twc2v"] Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.122266 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-twc2v"] Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.125938 4946 scope.go:117] "RemoveContainer" containerID="bf1afc5c2457f9ad1b2e7a43e41074b7cf0af71845ea436c2b8d3b175de26929" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.164570 4946 scope.go:117] "RemoveContainer" containerID="cd8cb06d872076ac8d37ccdd03e6c694fcf9053ccb797a95c8af40d4542fd15d" Dec 04 15:18:25 crc kubenswrapper[4946]: E1204 15:18:25.165228 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd8cb06d872076ac8d37ccdd03e6c694fcf9053ccb797a95c8af40d4542fd15d\": container with ID starting with cd8cb06d872076ac8d37ccdd03e6c694fcf9053ccb797a95c8af40d4542fd15d not found: ID does not exist" containerID="cd8cb06d872076ac8d37ccdd03e6c694fcf9053ccb797a95c8af40d4542fd15d" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.165299 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd8cb06d872076ac8d37ccdd03e6c694fcf9053ccb797a95c8af40d4542fd15d"} err="failed to get container status \"cd8cb06d872076ac8d37ccdd03e6c694fcf9053ccb797a95c8af40d4542fd15d\": rpc error: code = NotFound desc = could not find container \"cd8cb06d872076ac8d37ccdd03e6c694fcf9053ccb797a95c8af40d4542fd15d\": container with ID starting with cd8cb06d872076ac8d37ccdd03e6c694fcf9053ccb797a95c8af40d4542fd15d not found: ID does not exist" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.165338 4946 scope.go:117] "RemoveContainer" containerID="e21eb2b7b5ebb406f859a4263977c6d7c68b7d60a61f8d30971717e7fd102b3a" Dec 04 15:18:25 crc kubenswrapper[4946]: E1204 15:18:25.165801 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e21eb2b7b5ebb406f859a4263977c6d7c68b7d60a61f8d30971717e7fd102b3a\": container with ID starting with e21eb2b7b5ebb406f859a4263977c6d7c68b7d60a61f8d30971717e7fd102b3a not found: ID does not exist" containerID="e21eb2b7b5ebb406f859a4263977c6d7c68b7d60a61f8d30971717e7fd102b3a" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.165866 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e21eb2b7b5ebb406f859a4263977c6d7c68b7d60a61f8d30971717e7fd102b3a"} err="failed to get container status \"e21eb2b7b5ebb406f859a4263977c6d7c68b7d60a61f8d30971717e7fd102b3a\": rpc error: code = NotFound desc = could not find container \"e21eb2b7b5ebb406f859a4263977c6d7c68b7d60a61f8d30971717e7fd102b3a\": container with ID starting with e21eb2b7b5ebb406f859a4263977c6d7c68b7d60a61f8d30971717e7fd102b3a not found: ID does not exist" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.165910 4946 scope.go:117] "RemoveContainer" containerID="bf1afc5c2457f9ad1b2e7a43e41074b7cf0af71845ea436c2b8d3b175de26929" Dec 04 15:18:25 crc kubenswrapper[4946]: E1204 15:18:25.166403 4946 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"bf1afc5c2457f9ad1b2e7a43e41074b7cf0af71845ea436c2b8d3b175de26929\": container with ID starting with bf1afc5c2457f9ad1b2e7a43e41074b7cf0af71845ea436c2b8d3b175de26929 not found: ID does not exist" containerID="bf1afc5c2457f9ad1b2e7a43e41074b7cf0af71845ea436c2b8d3b175de26929" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.166443 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf1afc5c2457f9ad1b2e7a43e41074b7cf0af71845ea436c2b8d3b175de26929"} err="failed to get container status \"bf1afc5c2457f9ad1b2e7a43e41074b7cf0af71845ea436c2b8d3b175de26929\": rpc error: code = NotFound desc = could not find container \"bf1afc5c2457f9ad1b2e7a43e41074b7cf0af71845ea436c2b8d3b175de26929\": container with ID starting with bf1afc5c2457f9ad1b2e7a43e41074b7cf0af71845ea436c2b8d3b175de26929 not found: ID does not exist" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.462601 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455" path="/var/lib/kubelet/pods/35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455/volumes" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.511959 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2"] Dec 04 15:18:25 crc kubenswrapper[4946]: E1204 15:18:25.512222 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cd27802-0a2d-410a-93a0-d979a81d92d4" containerName="registry-server" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.512238 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cd27802-0a2d-410a-93a0-d979a81d92d4" containerName="registry-server" Dec 04 15:18:25 crc kubenswrapper[4946]: E1204 15:18:25.512249 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7349bd68-959a-4268-a194-f55f10061076" containerName="util" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.512257 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="7349bd68-959a-4268-a194-f55f10061076" containerName="util" Dec 04 15:18:25 crc kubenswrapper[4946]: E1204 15:18:25.512274 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cd27802-0a2d-410a-93a0-d979a81d92d4" containerName="extract-utilities" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.512281 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cd27802-0a2d-410a-93a0-d979a81d92d4" containerName="extract-utilities" Dec 04 15:18:25 crc kubenswrapper[4946]: E1204 15:18:25.512291 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455" containerName="extract-utilities" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.512297 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455" containerName="extract-utilities" Dec 04 15:18:25 crc kubenswrapper[4946]: E1204 15:18:25.512307 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455" containerName="registry-server" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.512313 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455" containerName="registry-server" Dec 04 15:18:25 crc kubenswrapper[4946]: E1204 15:18:25.512322 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cd27802-0a2d-410a-93a0-d979a81d92d4" 
containerName="extract-content" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.512328 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cd27802-0a2d-410a-93a0-d979a81d92d4" containerName="extract-content" Dec 04 15:18:25 crc kubenswrapper[4946]: E1204 15:18:25.512337 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7349bd68-959a-4268-a194-f55f10061076" containerName="extract" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.512343 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="7349bd68-959a-4268-a194-f55f10061076" containerName="extract" Dec 04 15:18:25 crc kubenswrapper[4946]: E1204 15:18:25.512352 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455" containerName="extract-content" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.512358 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455" containerName="extract-content" Dec 04 15:18:25 crc kubenswrapper[4946]: E1204 15:18:25.512367 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7349bd68-959a-4268-a194-f55f10061076" containerName="pull" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.512373 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="7349bd68-959a-4268-a194-f55f10061076" containerName="pull" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.512482 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="7349bd68-959a-4268-a194-f55f10061076" containerName="extract" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.512492 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cd27802-0a2d-410a-93a0-d979a81d92d4" containerName="registry-server" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.512509 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="35f7f3a8-69e0-44bd-9dbf-0c9a75ba7455" containerName="registry-server" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.512950 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.516980 4946 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.517205 4946 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.516989 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.517427 4946 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-rmsn6" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.517613 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.558304 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/14e7c7f4-2a8f-42fc-85aa-11f9a57be226-webhook-cert\") pod \"metallb-operator-controller-manager-9c8665f76-qlhr2\" (UID: \"14e7c7f4-2a8f-42fc-85aa-11f9a57be226\") " pod="metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.558395 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/14e7c7f4-2a8f-42fc-85aa-11f9a57be226-apiservice-cert\") pod \"metallb-operator-controller-manager-9c8665f76-qlhr2\" (UID: \"14e7c7f4-2a8f-42fc-85aa-11f9a57be226\") " pod="metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.558476 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fj7pz\" (UniqueName: \"kubernetes.io/projected/14e7c7f4-2a8f-42fc-85aa-11f9a57be226-kube-api-access-fj7pz\") pod \"metallb-operator-controller-manager-9c8665f76-qlhr2\" (UID: \"14e7c7f4-2a8f-42fc-85aa-11f9a57be226\") " pod="metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.639205 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2"] Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.659908 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/14e7c7f4-2a8f-42fc-85aa-11f9a57be226-webhook-cert\") pod \"metallb-operator-controller-manager-9c8665f76-qlhr2\" (UID: \"14e7c7f4-2a8f-42fc-85aa-11f9a57be226\") " pod="metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.660037 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/14e7c7f4-2a8f-42fc-85aa-11f9a57be226-apiservice-cert\") pod \"metallb-operator-controller-manager-9c8665f76-qlhr2\" (UID: \"14e7c7f4-2a8f-42fc-85aa-11f9a57be226\") " pod="metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.660108 4946 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-fj7pz\" (UniqueName: \"kubernetes.io/projected/14e7c7f4-2a8f-42fc-85aa-11f9a57be226-kube-api-access-fj7pz\") pod \"metallb-operator-controller-manager-9c8665f76-qlhr2\" (UID: \"14e7c7f4-2a8f-42fc-85aa-11f9a57be226\") " pod="metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.667390 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/14e7c7f4-2a8f-42fc-85aa-11f9a57be226-apiservice-cert\") pod \"metallb-operator-controller-manager-9c8665f76-qlhr2\" (UID: \"14e7c7f4-2a8f-42fc-85aa-11f9a57be226\") " pod="metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.680968 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/14e7c7f4-2a8f-42fc-85aa-11f9a57be226-webhook-cert\") pod \"metallb-operator-controller-manager-9c8665f76-qlhr2\" (UID: \"14e7c7f4-2a8f-42fc-85aa-11f9a57be226\") " pod="metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.685956 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fj7pz\" (UniqueName: \"kubernetes.io/projected/14e7c7f4-2a8f-42fc-85aa-11f9a57be226-kube-api-access-fj7pz\") pod \"metallb-operator-controller-manager-9c8665f76-qlhr2\" (UID: \"14e7c7f4-2a8f-42fc-85aa-11f9a57be226\") " pod="metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.829911 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.854939 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp"] Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.856109 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.859968 4946 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.869091 4946 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.869516 4946 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-hr25n" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.894575 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp"] Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.972865 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c07a0c11-227a-4c24-8daa-695fa165bb03-webhook-cert\") pod \"metallb-operator-webhook-server-76986644d9-lbmnp\" (UID: \"c07a0c11-227a-4c24-8daa-695fa165bb03\") " pod="metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.973227 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c07a0c11-227a-4c24-8daa-695fa165bb03-apiservice-cert\") pod \"metallb-operator-webhook-server-76986644d9-lbmnp\" (UID: \"c07a0c11-227a-4c24-8daa-695fa165bb03\") " pod="metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp" Dec 04 15:18:25 crc kubenswrapper[4946]: I1204 15:18:25.973273 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6ql6\" (UniqueName: \"kubernetes.io/projected/c07a0c11-227a-4c24-8daa-695fa165bb03-kube-api-access-w6ql6\") pod \"metallb-operator-webhook-server-76986644d9-lbmnp\" (UID: \"c07a0c11-227a-4c24-8daa-695fa165bb03\") " pod="metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp" Dec 04 15:18:26 crc kubenswrapper[4946]: I1204 15:18:26.077861 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6ql6\" (UniqueName: \"kubernetes.io/projected/c07a0c11-227a-4c24-8daa-695fa165bb03-kube-api-access-w6ql6\") pod \"metallb-operator-webhook-server-76986644d9-lbmnp\" (UID: \"c07a0c11-227a-4c24-8daa-695fa165bb03\") " pod="metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp" Dec 04 15:18:26 crc kubenswrapper[4946]: I1204 15:18:26.081234 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c07a0c11-227a-4c24-8daa-695fa165bb03-webhook-cert\") pod \"metallb-operator-webhook-server-76986644d9-lbmnp\" (UID: \"c07a0c11-227a-4c24-8daa-695fa165bb03\") " pod="metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp" Dec 04 15:18:26 crc kubenswrapper[4946]: I1204 15:18:26.081475 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c07a0c11-227a-4c24-8daa-695fa165bb03-apiservice-cert\") pod \"metallb-operator-webhook-server-76986644d9-lbmnp\" (UID: \"c07a0c11-227a-4c24-8daa-695fa165bb03\") " pod="metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp" Dec 04 15:18:26 crc kubenswrapper[4946]: I1204 
15:18:26.087693 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c07a0c11-227a-4c24-8daa-695fa165bb03-webhook-cert\") pod \"metallb-operator-webhook-server-76986644d9-lbmnp\" (UID: \"c07a0c11-227a-4c24-8daa-695fa165bb03\") " pod="metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp" Dec 04 15:18:26 crc kubenswrapper[4946]: I1204 15:18:26.093911 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c07a0c11-227a-4c24-8daa-695fa165bb03-apiservice-cert\") pod \"metallb-operator-webhook-server-76986644d9-lbmnp\" (UID: \"c07a0c11-227a-4c24-8daa-695fa165bb03\") " pod="metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp" Dec 04 15:18:26 crc kubenswrapper[4946]: I1204 15:18:26.119827 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6ql6\" (UniqueName: \"kubernetes.io/projected/c07a0c11-227a-4c24-8daa-695fa165bb03-kube-api-access-w6ql6\") pod \"metallb-operator-webhook-server-76986644d9-lbmnp\" (UID: \"c07a0c11-227a-4c24-8daa-695fa165bb03\") " pod="metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp" Dec 04 15:18:26 crc kubenswrapper[4946]: I1204 15:18:26.172482 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2"] Dec 04 15:18:26 crc kubenswrapper[4946]: W1204 15:18:26.193081 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod14e7c7f4_2a8f_42fc_85aa_11f9a57be226.slice/crio-438ff8e8199ce2fa9034ed3181fdc592a0e5f3da08ba4f4b21c72b419af5efe0 WatchSource:0}: Error finding container 438ff8e8199ce2fa9034ed3181fdc592a0e5f3da08ba4f4b21c72b419af5efe0: Status 404 returned error can't find the container with id 438ff8e8199ce2fa9034ed3181fdc592a0e5f3da08ba4f4b21c72b419af5efe0 Dec 04 15:18:26 crc kubenswrapper[4946]: I1204 15:18:26.235550 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp" Dec 04 15:18:26 crc kubenswrapper[4946]: I1204 15:18:26.517929 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp"] Dec 04 15:18:26 crc kubenswrapper[4946]: W1204 15:18:26.528143 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc07a0c11_227a_4c24_8daa_695fa165bb03.slice/crio-e4973179f92b50388fe2c3b0527192ef7a3317e1a6363c7828fe4bdffd0c7c30 WatchSource:0}: Error finding container e4973179f92b50388fe2c3b0527192ef7a3317e1a6363c7828fe4bdffd0c7c30: Status 404 returned error can't find the container with id e4973179f92b50388fe2c3b0527192ef7a3317e1a6363c7828fe4bdffd0c7c30 Dec 04 15:18:27 crc kubenswrapper[4946]: I1204 15:18:27.089055 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp" event={"ID":"c07a0c11-227a-4c24-8daa-695fa165bb03","Type":"ContainerStarted","Data":"e4973179f92b50388fe2c3b0527192ef7a3317e1a6363c7828fe4bdffd0c7c30"} Dec 04 15:18:27 crc kubenswrapper[4946]: I1204 15:18:27.090332 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2" event={"ID":"14e7c7f4-2a8f-42fc-85aa-11f9a57be226","Type":"ContainerStarted","Data":"438ff8e8199ce2fa9034ed3181fdc592a0e5f3da08ba4f4b21c72b419af5efe0"} Dec 04 15:18:34 crc kubenswrapper[4946]: I1204 15:18:34.164082 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp" event={"ID":"c07a0c11-227a-4c24-8daa-695fa165bb03","Type":"ContainerStarted","Data":"24b772ee1082adacae1115b23f7f3945e0c290c61ae694de6f461044ed5f16ea"} Dec 04 15:18:34 crc kubenswrapper[4946]: I1204 15:18:34.165091 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp" Dec 04 15:18:34 crc kubenswrapper[4946]: I1204 15:18:34.166272 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2" event={"ID":"14e7c7f4-2a8f-42fc-85aa-11f9a57be226","Type":"ContainerStarted","Data":"2e8a34440368b66fd5a9bee0d197e474127177998bd5ba071f0a5c38c6783b20"} Dec 04 15:18:34 crc kubenswrapper[4946]: I1204 15:18:34.166556 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2" Dec 04 15:18:34 crc kubenswrapper[4946]: I1204 15:18:34.189920 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp" podStartSLOduration=2.460793534 podStartE2EDuration="9.189888842s" podCreationTimestamp="2025-12-04 15:18:25 +0000 UTC" firstStartedPulling="2025-12-04 15:18:26.531775716 +0000 UTC m=+957.417819357" lastFinishedPulling="2025-12-04 15:18:33.260871024 +0000 UTC m=+964.146914665" observedRunningTime="2025-12-04 15:18:34.184040923 +0000 UTC m=+965.070084564" watchObservedRunningTime="2025-12-04 15:18:34.189888842 +0000 UTC m=+965.075932483" Dec 04 15:18:34 crc kubenswrapper[4946]: I1204 15:18:34.227203 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2" podStartSLOduration=2.178144615 podStartE2EDuration="9.227170475s" podCreationTimestamp="2025-12-04 
15:18:25 +0000 UTC" firstStartedPulling="2025-12-04 15:18:26.198411399 +0000 UTC m=+957.084455040" lastFinishedPulling="2025-12-04 15:18:33.247437259 +0000 UTC m=+964.133480900" observedRunningTime="2025-12-04 15:18:34.208707813 +0000 UTC m=+965.094751474" watchObservedRunningTime="2025-12-04 15:18:34.227170475 +0000 UTC m=+965.113214126" Dec 04 15:18:46 crc kubenswrapper[4946]: I1204 15:18:46.241330 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-76986644d9-lbmnp" Dec 04 15:18:52 crc kubenswrapper[4946]: I1204 15:18:52.479166 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:18:52 crc kubenswrapper[4946]: I1204 15:18:52.479834 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:19:05 crc kubenswrapper[4946]: I1204 15:19:05.834841 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-9c8665f76-qlhr2" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.694258 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-9r7z7"] Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.697105 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-9r7z7" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.698598 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-665hd"] Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.699443 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-665hd" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.699679 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.699935 4946 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.699966 4946 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-bfn42" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.700981 4946 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.744939 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-665hd"] Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.796154 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6dmh\" (UniqueName: \"kubernetes.io/projected/2232132f-0377-4daf-98dc-9a2ea013a794-kube-api-access-m6dmh\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.796245 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2232132f-0377-4daf-98dc-9a2ea013a794-metrics-certs\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.796292 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/2232132f-0377-4daf-98dc-9a2ea013a794-reloader\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.796341 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvw7z\" (UniqueName: \"kubernetes.io/projected/3fb0ad82-3e42-4980-ac9c-3fba3fac16fa-kube-api-access-tvw7z\") pod \"frr-k8s-webhook-server-7fcb986d4-665hd\" (UID: \"3fb0ad82-3e42-4980-ac9c-3fba3fac16fa\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-665hd" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.796393 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/2232132f-0377-4daf-98dc-9a2ea013a794-frr-startup\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.796450 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/2232132f-0377-4daf-98dc-9a2ea013a794-frr-conf\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.796502 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: 
\"kubernetes.io/empty-dir/2232132f-0377-4daf-98dc-9a2ea013a794-metrics\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.796531 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3fb0ad82-3e42-4980-ac9c-3fba3fac16fa-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-665hd\" (UID: \"3fb0ad82-3e42-4980-ac9c-3fba3fac16fa\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-665hd" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.797757 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/2232132f-0377-4daf-98dc-9a2ea013a794-frr-sockets\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.825530 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-nsx9h"] Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.826541 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-nsx9h" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.833774 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.833895 4946 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-6jwc5" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.834054 4946 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.834154 4946 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.878955 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-2h9x2"] Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.880453 4946 util.go:30] "No sandbox for pod can be found. 
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.883270 4946 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.899205 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/12008ba7-79ab-4c29-beb5-c3d5bffa7bd2-metrics-certs\") pod \"controller-f8648f98b-2h9x2\" (UID: \"12008ba7-79ab-4c29-beb5-c3d5bffa7bd2\") " pod="metallb-system/controller-f8648f98b-2h9x2"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.899269 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/2232132f-0377-4daf-98dc-9a2ea013a794-metrics\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.899298 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3fb0ad82-3e42-4980-ac9c-3fba3fac16fa-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-665hd\" (UID: \"3fb0ad82-3e42-4980-ac9c-3fba3fac16fa\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-665hd"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.899328 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/2232132f-0377-4daf-98dc-9a2ea013a794-frr-sockets\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.899350 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2c933247-f732-4808-a196-15d9ad5f03e7-memberlist\") pod \"speaker-nsx9h\" (UID: \"2c933247-f732-4808-a196-15d9ad5f03e7\") " pod="metallb-system/speaker-nsx9h"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.899393 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6dmh\" (UniqueName: \"kubernetes.io/projected/2232132f-0377-4daf-98dc-9a2ea013a794-kube-api-access-m6dmh\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.899419 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/12008ba7-79ab-4c29-beb5-c3d5bffa7bd2-cert\") pod \"controller-f8648f98b-2h9x2\" (UID: \"12008ba7-79ab-4c29-beb5-c3d5bffa7bd2\") " pod="metallb-system/controller-f8648f98b-2h9x2"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.899440 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5sqn8\" (UniqueName: \"kubernetes.io/projected/2c933247-f732-4808-a196-15d9ad5f03e7-kube-api-access-5sqn8\") pod \"speaker-nsx9h\" (UID: \"2c933247-f732-4808-a196-15d9ad5f03e7\") " pod="metallb-system/speaker-nsx9h"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.899465 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2232132f-0377-4daf-98dc-9a2ea013a794-metrics-certs\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.899486 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/2232132f-0377-4daf-98dc-9a2ea013a794-reloader\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.899514 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvw7z\" (UniqueName: \"kubernetes.io/projected/3fb0ad82-3e42-4980-ac9c-3fba3fac16fa-kube-api-access-tvw7z\") pod \"frr-k8s-webhook-server-7fcb986d4-665hd\" (UID: \"3fb0ad82-3e42-4980-ac9c-3fba3fac16fa\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-665hd"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.899534 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfg9x\" (UniqueName: \"kubernetes.io/projected/12008ba7-79ab-4c29-beb5-c3d5bffa7bd2-kube-api-access-gfg9x\") pod \"controller-f8648f98b-2h9x2\" (UID: \"12008ba7-79ab-4c29-beb5-c3d5bffa7bd2\") " pod="metallb-system/controller-f8648f98b-2h9x2"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.899556 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/2c933247-f732-4808-a196-15d9ad5f03e7-metallb-excludel2\") pod \"speaker-nsx9h\" (UID: \"2c933247-f732-4808-a196-15d9ad5f03e7\") " pod="metallb-system/speaker-nsx9h"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.899577 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/2232132f-0377-4daf-98dc-9a2ea013a794-frr-startup\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.899610 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/2232132f-0377-4daf-98dc-9a2ea013a794-frr-conf\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.899635 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2c933247-f732-4808-a196-15d9ad5f03e7-metrics-certs\") pod \"speaker-nsx9h\" (UID: \"2c933247-f732-4808-a196-15d9ad5f03e7\") " pod="metallb-system/speaker-nsx9h"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.900164 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/2232132f-0377-4daf-98dc-9a2ea013a794-frr-sockets\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.900770 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/2232132f-0377-4daf-98dc-9a2ea013a794-frr-conf\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.900935 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-2h9x2"]
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.901064 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/2232132f-0377-4daf-98dc-9a2ea013a794-reloader\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.901502 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/2232132f-0377-4daf-98dc-9a2ea013a794-frr-startup\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.901858 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/2232132f-0377-4daf-98dc-9a2ea013a794-metrics\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.908392 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2232132f-0377-4daf-98dc-9a2ea013a794-metrics-certs\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.908994 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3fb0ad82-3e42-4980-ac9c-3fba3fac16fa-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-665hd\" (UID: \"3fb0ad82-3e42-4980-ac9c-3fba3fac16fa\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-665hd"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.931537 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvw7z\" (UniqueName: \"kubernetes.io/projected/3fb0ad82-3e42-4980-ac9c-3fba3fac16fa-kube-api-access-tvw7z\") pod \"frr-k8s-webhook-server-7fcb986d4-665hd\" (UID: \"3fb0ad82-3e42-4980-ac9c-3fba3fac16fa\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-665hd"
Dec 04 15:19:06 crc kubenswrapper[4946]: I1204 15:19:06.939259 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6dmh\" (UniqueName: \"kubernetes.io/projected/2232132f-0377-4daf-98dc-9a2ea013a794-kube-api-access-m6dmh\") pod \"frr-k8s-9r7z7\" (UID: \"2232132f-0377-4daf-98dc-9a2ea013a794\") " pod="metallb-system/frr-k8s-9r7z7"
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.000648 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/12008ba7-79ab-4c29-beb5-c3d5bffa7bd2-cert\") pod \"controller-f8648f98b-2h9x2\" (UID: \"12008ba7-79ab-4c29-beb5-c3d5bffa7bd2\") " pod="metallb-system/controller-f8648f98b-2h9x2"
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.000701 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5sqn8\" (UniqueName: \"kubernetes.io/projected/2c933247-f732-4808-a196-15d9ad5f03e7-kube-api-access-5sqn8\") pod \"speaker-nsx9h\" (UID: \"2c933247-f732-4808-a196-15d9ad5f03e7\") " pod="metallb-system/speaker-nsx9h"
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.000732 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfg9x\" (UniqueName: \"kubernetes.io/projected/12008ba7-79ab-4c29-beb5-c3d5bffa7bd2-kube-api-access-gfg9x\") pod \"controller-f8648f98b-2h9x2\" (UID: \"12008ba7-79ab-4c29-beb5-c3d5bffa7bd2\") " pod="metallb-system/controller-f8648f98b-2h9x2"
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.000776 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/2c933247-f732-4808-a196-15d9ad5f03e7-metallb-excludel2\") pod \"speaker-nsx9h\" (UID: \"2c933247-f732-4808-a196-15d9ad5f03e7\") " pod="metallb-system/speaker-nsx9h"
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.000809 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2c933247-f732-4808-a196-15d9ad5f03e7-metrics-certs\") pod \"speaker-nsx9h\" (UID: \"2c933247-f732-4808-a196-15d9ad5f03e7\") " pod="metallb-system/speaker-nsx9h"
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.000833 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/12008ba7-79ab-4c29-beb5-c3d5bffa7bd2-metrics-certs\") pod \"controller-f8648f98b-2h9x2\" (UID: \"12008ba7-79ab-4c29-beb5-c3d5bffa7bd2\") " pod="metallb-system/controller-f8648f98b-2h9x2"
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.000866 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2c933247-f732-4808-a196-15d9ad5f03e7-memberlist\") pod \"speaker-nsx9h\" (UID: \"2c933247-f732-4808-a196-15d9ad5f03e7\") " pod="metallb-system/speaker-nsx9h"
Dec 04 15:19:07 crc kubenswrapper[4946]: E1204 15:19:07.000996 4946 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Dec 04 15:19:07 crc kubenswrapper[4946]: E1204 15:19:07.001058 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2c933247-f732-4808-a196-15d9ad5f03e7-memberlist podName:2c933247-f732-4808-a196-15d9ad5f03e7 nodeName:}" failed. No retries permitted until 2025-12-04 15:19:07.501039651 +0000 UTC m=+998.387083292 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/2c933247-f732-4808-a196-15d9ad5f03e7-memberlist") pod "speaker-nsx9h" (UID: "2c933247-f732-4808-a196-15d9ad5f03e7") : secret "metallb-memberlist" not found
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.002778 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/2c933247-f732-4808-a196-15d9ad5f03e7-metallb-excludel2\") pod \"speaker-nsx9h\" (UID: \"2c933247-f732-4808-a196-15d9ad5f03e7\") " pod="metallb-system/speaker-nsx9h"
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.005213 4946 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.005646 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/12008ba7-79ab-4c29-beb5-c3d5bffa7bd2-metrics-certs\") pod \"controller-f8648f98b-2h9x2\" (UID: \"12008ba7-79ab-4c29-beb5-c3d5bffa7bd2\") " pod="metallb-system/controller-f8648f98b-2h9x2"
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.014286 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2c933247-f732-4808-a196-15d9ad5f03e7-metrics-certs\") pod \"speaker-nsx9h\" (UID: \"2c933247-f732-4808-a196-15d9ad5f03e7\") " pod="metallb-system/speaker-nsx9h"
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.018046 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/12008ba7-79ab-4c29-beb5-c3d5bffa7bd2-cert\") pod \"controller-f8648f98b-2h9x2\" (UID: \"12008ba7-79ab-4c29-beb5-c3d5bffa7bd2\") " pod="metallb-system/controller-f8648f98b-2h9x2"
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.020007 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-9r7z7"
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.022002 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5sqn8\" (UniqueName: \"kubernetes.io/projected/2c933247-f732-4808-a196-15d9ad5f03e7-kube-api-access-5sqn8\") pod \"speaker-nsx9h\" (UID: \"2c933247-f732-4808-a196-15d9ad5f03e7\") " pod="metallb-system/speaker-nsx9h"
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.023941 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfg9x\" (UniqueName: \"kubernetes.io/projected/12008ba7-79ab-4c29-beb5-c3d5bffa7bd2-kube-api-access-gfg9x\") pod \"controller-f8648f98b-2h9x2\" (UID: \"12008ba7-79ab-4c29-beb5-c3d5bffa7bd2\") " pod="metallb-system/controller-f8648f98b-2h9x2"
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.032834 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-665hd"
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.215556 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-2h9x2"
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.381893 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-665hd"]
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.526493 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2c933247-f732-4808-a196-15d9ad5f03e7-memberlist\") pod \"speaker-nsx9h\" (UID: \"2c933247-f732-4808-a196-15d9ad5f03e7\") " pod="metallb-system/speaker-nsx9h"
Dec 04 15:19:07 crc kubenswrapper[4946]: E1204 15:19:07.526752 4946 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Dec 04 15:19:07 crc kubenswrapper[4946]: E1204 15:19:07.526874 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2c933247-f732-4808-a196-15d9ad5f03e7-memberlist podName:2c933247-f732-4808-a196-15d9ad5f03e7 nodeName:}" failed. No retries permitted until 2025-12-04 15:19:08.526827142 +0000 UTC m=+999.412870783 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/2c933247-f732-4808-a196-15d9ad5f03e7-memberlist") pod "speaker-nsx9h" (UID: "2c933247-f732-4808-a196-15d9ad5f03e7") : secret "metallb-memberlist" not found
Dec 04 15:19:07 crc kubenswrapper[4946]: I1204 15:19:07.567577 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-2h9x2"]
Dec 04 15:19:07 crc kubenswrapper[4946]: W1204 15:19:07.573417 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod12008ba7_79ab_4c29_beb5_c3d5bffa7bd2.slice/crio-de51a37166683b23fe72a53203cc89513b2e0268262a96b246cb7ebdf449e078 WatchSource:0}: Error finding container de51a37166683b23fe72a53203cc89513b2e0268262a96b246cb7ebdf449e078: Status 404 returned error can't find the container with id de51a37166683b23fe72a53203cc89513b2e0268262a96b246cb7ebdf449e078
Dec 04 15:19:08 crc kubenswrapper[4946]: I1204 15:19:08.415046 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-2h9x2" event={"ID":"12008ba7-79ab-4c29-beb5-c3d5bffa7bd2","Type":"ContainerStarted","Data":"570ce0833a48627274bceb9a5bb9127a34c0416f892035adfdf33fa6d3b2527a"}
Dec 04 15:19:08 crc kubenswrapper[4946]: I1204 15:19:08.415175 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-2h9x2" event={"ID":"12008ba7-79ab-4c29-beb5-c3d5bffa7bd2","Type":"ContainerStarted","Data":"abe6845eb8bc8fa14a4413b75050319aae97fe296c71b927e53fd65886305338"}
Dec 04 15:19:08 crc kubenswrapper[4946]: I1204 15:19:08.415190 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-2h9x2" event={"ID":"12008ba7-79ab-4c29-beb5-c3d5bffa7bd2","Type":"ContainerStarted","Data":"de51a37166683b23fe72a53203cc89513b2e0268262a96b246cb7ebdf449e078"}
Dec 04 15:19:08 crc kubenswrapper[4946]: I1204 15:19:08.415246 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-2h9x2"
Dec 04 15:19:08 crc kubenswrapper[4946]: I1204 15:19:08.416164 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9r7z7" event={"ID":"2232132f-0377-4daf-98dc-9a2ea013a794","Type":"ContainerStarted","Data":"ff7da91a2b4ea3d7de19985164594521cde15ff3a6e621c277505c06ef89f762"}
Dec 04 15:19:08 crc kubenswrapper[4946]: I1204 15:19:08.417434 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-665hd" event={"ID":"3fb0ad82-3e42-4980-ac9c-3fba3fac16fa","Type":"ContainerStarted","Data":"74930028bbca5eaf22876fd1195761585f1429273b510979005d2e3dc4e1d3b3"}
Dec 04 15:19:08 crc kubenswrapper[4946]: I1204 15:19:08.438704 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-2h9x2" podStartSLOduration=2.438675273 podStartE2EDuration="2.438675273s" podCreationTimestamp="2025-12-04 15:19:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:19:08.435917478 +0000 UTC m=+999.321961119" watchObservedRunningTime="2025-12-04 15:19:08.438675273 +0000 UTC m=+999.324718914"
Dec 04 15:19:08 crc kubenswrapper[4946]: I1204 15:19:08.540979 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2c933247-f732-4808-a196-15d9ad5f03e7-memberlist\") pod \"speaker-nsx9h\" (UID: \"2c933247-f732-4808-a196-15d9ad5f03e7\") " pod="metallb-system/speaker-nsx9h"
Dec 04 15:19:08 crc kubenswrapper[4946]: I1204 15:19:08.548366 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2c933247-f732-4808-a196-15d9ad5f03e7-memberlist\") pod \"speaker-nsx9h\" (UID: \"2c933247-f732-4808-a196-15d9ad5f03e7\") " pod="metallb-system/speaker-nsx9h"
Dec 04 15:19:08 crc kubenswrapper[4946]: I1204 15:19:08.646748 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-nsx9h"
Dec 04 15:19:08 crc kubenswrapper[4946]: W1204 15:19:08.677139 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2c933247_f732_4808_a196_15d9ad5f03e7.slice/crio-f6b00fcae4893a5f538e09ac7201d9afe0712152b5caefcd4f37510de367772e WatchSource:0}: Error finding container f6b00fcae4893a5f538e09ac7201d9afe0712152b5caefcd4f37510de367772e: Status 404 returned error can't find the container with id f6b00fcae4893a5f538e09ac7201d9afe0712152b5caefcd4f37510de367772e
Dec 04 15:19:09 crc kubenswrapper[4946]: I1204 15:19:09.441638 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-nsx9h" event={"ID":"2c933247-f732-4808-a196-15d9ad5f03e7","Type":"ContainerStarted","Data":"44f88e925908c9cf5befcbbde2fe38ea0701c95acfbbdd077ae68efe88491fb3"}
Dec 04 15:19:09 crc kubenswrapper[4946]: I1204 15:19:09.441702 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-nsx9h" event={"ID":"2c933247-f732-4808-a196-15d9ad5f03e7","Type":"ContainerStarted","Data":"f6b00fcae4893a5f538e09ac7201d9afe0712152b5caefcd4f37510de367772e"}
Dec 04 15:19:10 crc kubenswrapper[4946]: I1204 15:19:10.453263 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-nsx9h" event={"ID":"2c933247-f732-4808-a196-15d9ad5f03e7","Type":"ContainerStarted","Data":"e8deed1cb959b2e9ef5d0b0e77a4f999cfd682311c82f897a4962413027b5a1a"}
Dec 04 15:19:10 crc kubenswrapper[4946]: I1204 15:19:10.454105 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-nsx9h"
Dec 04 15:19:10 crc kubenswrapper[4946]: I1204 15:19:10.484959 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-nsx9h" podStartSLOduration=4.484941267 podStartE2EDuration="4.484941267s" podCreationTimestamp="2025-12-04 15:19:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:19:10.479234751 +0000 UTC m=+1001.365278392" watchObservedRunningTime="2025-12-04 15:19:10.484941267 +0000 UTC m=+1001.370984908"
Dec 04 15:19:17 crc kubenswrapper[4946]: I1204 15:19:17.221333 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-2h9x2"
Dec 04 15:19:17 crc kubenswrapper[4946]: I1204 15:19:17.530731 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-665hd" event={"ID":"3fb0ad82-3e42-4980-ac9c-3fba3fac16fa","Type":"ContainerStarted","Data":"3c745c6eedde02cd2c6762962590405feba039cf076582e8a86297cca5108d98"}
Dec 04 15:19:17 crc kubenswrapper[4946]: I1204 15:19:17.531294 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-665hd"
Dec 04 15:19:17 crc kubenswrapper[4946]: I1204 15:19:17.532626 4946 generic.go:334] "Generic (PLEG): container finished" podID="2232132f-0377-4daf-98dc-9a2ea013a794" containerID="076a2db108a30849967fb689793b09dd6d075fac3f38bbd03e956d816762afdc" exitCode=0
Dec 04 15:19:17 crc kubenswrapper[4946]: I1204 15:19:17.532691 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9r7z7" event={"ID":"2232132f-0377-4daf-98dc-9a2ea013a794","Type":"ContainerDied","Data":"076a2db108a30849967fb689793b09dd6d075fac3f38bbd03e956d816762afdc"}
Dec 04 15:19:17 crc kubenswrapper[4946]: I1204 15:19:17.555367 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-665hd" podStartSLOduration=1.869543433 podStartE2EDuration="11.555343788s" podCreationTimestamp="2025-12-04 15:19:06 +0000 UTC" firstStartedPulling="2025-12-04 15:19:07.420835639 +0000 UTC m=+998.306879280" lastFinishedPulling="2025-12-04 15:19:17.106635984 +0000 UTC m=+1007.992679635" observedRunningTime="2025-12-04 15:19:17.55209695 +0000 UTC m=+1008.438140601" watchObservedRunningTime="2025-12-04 15:19:17.555343788 +0000 UTC m=+1008.441387429"
Dec 04 15:19:18 crc kubenswrapper[4946]: I1204 15:19:18.561493 4946 generic.go:334] "Generic (PLEG): container finished" podID="2232132f-0377-4daf-98dc-9a2ea013a794" containerID="101162d6ec35dd21f728807109e7e868de002cd5c356e3f863d11748c1a6cd8b" exitCode=0
Dec 04 15:19:18 crc kubenswrapper[4946]: I1204 15:19:18.561600 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9r7z7" event={"ID":"2232132f-0377-4daf-98dc-9a2ea013a794","Type":"ContainerDied","Data":"101162d6ec35dd21f728807109e7e868de002cd5c356e3f863d11748c1a6cd8b"}
Dec 04 15:19:19 crc kubenswrapper[4946]: I1204 15:19:19.573765 4946 generic.go:334] "Generic (PLEG): container finished" podID="2232132f-0377-4daf-98dc-9a2ea013a794" containerID="becf8b3b86c6dee95f58a6eaa59984823986ba46fbd223d3b3a9e4ae04856f01" exitCode=0
Dec 04 15:19:19 crc kubenswrapper[4946]: I1204 15:19:19.573842 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9r7z7" event={"ID":"2232132f-0377-4daf-98dc-9a2ea013a794","Type":"ContainerDied","Data":"becf8b3b86c6dee95f58a6eaa59984823986ba46fbd223d3b3a9e4ae04856f01"}
Dec 04 15:19:20 crc kubenswrapper[4946]: I1204 15:19:20.584621 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9r7z7" event={"ID":"2232132f-0377-4daf-98dc-9a2ea013a794","Type":"ContainerStarted","Data":"55958d04259b14299e3c2035c5892bb6187f7c8054e3eb2bf62db5b8d02a9403"}
for pod" pod="metallb-system/frr-k8s-9r7z7" event={"ID":"2232132f-0377-4daf-98dc-9a2ea013a794","Type":"ContainerStarted","Data":"55958d04259b14299e3c2035c5892bb6187f7c8054e3eb2bf62db5b8d02a9403"} Dec 04 15:19:20 crc kubenswrapper[4946]: I1204 15:19:20.584690 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9r7z7" event={"ID":"2232132f-0377-4daf-98dc-9a2ea013a794","Type":"ContainerStarted","Data":"a63f36c3c4efdf4f00a11d8535d6d33fe92ef75bedf22502d371596378505539"} Dec 04 15:19:21 crc kubenswrapper[4946]: I1204 15:19:21.599419 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9r7z7" event={"ID":"2232132f-0377-4daf-98dc-9a2ea013a794","Type":"ContainerStarted","Data":"73acba6e762bbf3296d8ea4949f3d331157a7350f27e1388d8ff6d576ce18c11"} Dec 04 15:19:21 crc kubenswrapper[4946]: I1204 15:19:21.599495 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9r7z7" event={"ID":"2232132f-0377-4daf-98dc-9a2ea013a794","Type":"ContainerStarted","Data":"593d28df4e9db38bf3f1998b36d50975367306293576efd293d1ec52e17300ff"} Dec 04 15:19:21 crc kubenswrapper[4946]: I1204 15:19:21.599511 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9r7z7" event={"ID":"2232132f-0377-4daf-98dc-9a2ea013a794","Type":"ContainerStarted","Data":"e5f517e61d5693c45739ca6f5e6437a7103ac9cf427df7927f81bf816ca5696b"} Dec 04 15:19:22 crc kubenswrapper[4946]: I1204 15:19:22.479415 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:19:22 crc kubenswrapper[4946]: I1204 15:19:22.479502 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:19:22 crc kubenswrapper[4946]: I1204 15:19:22.612729 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9r7z7" event={"ID":"2232132f-0377-4daf-98dc-9a2ea013a794","Type":"ContainerStarted","Data":"3203a38fd85e19a2e017dc208dbf84732552ed8760ee38c76765e791f191a6d8"} Dec 04 15:19:22 crc kubenswrapper[4946]: I1204 15:19:22.613182 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-9r7z7" Dec 04 15:19:22 crc kubenswrapper[4946]: I1204 15:19:22.641878 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-9r7z7" podStartSLOduration=6.975993149 podStartE2EDuration="16.641844822s" podCreationTimestamp="2025-12-04 15:19:06 +0000 UTC" firstStartedPulling="2025-12-04 15:19:07.413971752 +0000 UTC m=+998.300015393" lastFinishedPulling="2025-12-04 15:19:17.079823425 +0000 UTC m=+1007.965867066" observedRunningTime="2025-12-04 15:19:22.634686407 +0000 UTC m=+1013.520730058" watchObservedRunningTime="2025-12-04 15:19:22.641844822 +0000 UTC m=+1013.527888463" Dec 04 15:19:27 crc kubenswrapper[4946]: I1204 15:19:27.021031 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-9r7z7" Dec 04 15:19:27 crc kubenswrapper[4946]: I1204 15:19:27.041931 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-665hd" Dec 04 15:19:27 crc kubenswrapper[4946]: I1204 15:19:27.063879 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-9r7z7" Dec 04 15:19:28 crc kubenswrapper[4946]: I1204 15:19:28.652334 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-nsx9h" Dec 04 15:19:31 crc kubenswrapper[4946]: I1204 15:19:31.696364 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-dkskn"] Dec 04 15:19:31 crc kubenswrapper[4946]: I1204 15:19:31.697876 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-dkskn" Dec 04 15:19:31 crc kubenswrapper[4946]: I1204 15:19:31.700401 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-rfshw" Dec 04 15:19:31 crc kubenswrapper[4946]: I1204 15:19:31.700672 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 04 15:19:31 crc kubenswrapper[4946]: I1204 15:19:31.700825 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 04 15:19:31 crc kubenswrapper[4946]: I1204 15:19:31.718210 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-dkskn"] Dec 04 15:19:31 crc kubenswrapper[4946]: I1204 15:19:31.772468 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7777r\" (UniqueName: \"kubernetes.io/projected/b16b34e5-fb44-4972-93f5-fcd0432266ff-kube-api-access-7777r\") pod \"openstack-operator-index-dkskn\" (UID: \"b16b34e5-fb44-4972-93f5-fcd0432266ff\") " pod="openstack-operators/openstack-operator-index-dkskn" Dec 04 15:19:31 crc kubenswrapper[4946]: I1204 15:19:31.874085 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7777r\" (UniqueName: \"kubernetes.io/projected/b16b34e5-fb44-4972-93f5-fcd0432266ff-kube-api-access-7777r\") pod \"openstack-operator-index-dkskn\" (UID: \"b16b34e5-fb44-4972-93f5-fcd0432266ff\") " pod="openstack-operators/openstack-operator-index-dkskn" Dec 04 15:19:31 crc kubenswrapper[4946]: I1204 15:19:31.904581 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7777r\" (UniqueName: \"kubernetes.io/projected/b16b34e5-fb44-4972-93f5-fcd0432266ff-kube-api-access-7777r\") pod \"openstack-operator-index-dkskn\" (UID: \"b16b34e5-fb44-4972-93f5-fcd0432266ff\") " pod="openstack-operators/openstack-operator-index-dkskn" Dec 04 15:19:32 crc kubenswrapper[4946]: I1204 15:19:32.018468 4946 util.go:30] "No sandbox for pod can be found. 
Dec 04 15:19:32 crc kubenswrapper[4946]: I1204 15:19:32.687728 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-dkskn"]
Dec 04 15:19:32 crc kubenswrapper[4946]: I1204 15:19:32.705172 4946 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 04 15:19:33 crc kubenswrapper[4946]: I1204 15:19:33.711287 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-dkskn" event={"ID":"b16b34e5-fb44-4972-93f5-fcd0432266ff","Type":"ContainerStarted","Data":"6760abe6b2082b6628e998f809f0cb99beda049e186900a6ad70fd794c5caac9"}
Dec 04 15:19:33 crc kubenswrapper[4946]: I1204 15:19:33.870572 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-dkskn"]
Dec 04 15:19:34 crc kubenswrapper[4946]: I1204 15:19:34.277308 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-ttbdt"]
Dec 04 15:19:34 crc kubenswrapper[4946]: I1204 15:19:34.278905 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-ttbdt"
Dec 04 15:19:34 crc kubenswrapper[4946]: I1204 15:19:34.286387 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-ttbdt"]
Dec 04 15:19:34 crc kubenswrapper[4946]: I1204 15:19:34.337338 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xh8d\" (UniqueName: \"kubernetes.io/projected/06d05850-f87f-4944-be9f-c3f86f6bbc3e-kube-api-access-6xh8d\") pod \"openstack-operator-index-ttbdt\" (UID: \"06d05850-f87f-4944-be9f-c3f86f6bbc3e\") " pod="openstack-operators/openstack-operator-index-ttbdt"
Dec 04 15:19:34 crc kubenswrapper[4946]: I1204 15:19:34.439003 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xh8d\" (UniqueName: \"kubernetes.io/projected/06d05850-f87f-4944-be9f-c3f86f6bbc3e-kube-api-access-6xh8d\") pod \"openstack-operator-index-ttbdt\" (UID: \"06d05850-f87f-4944-be9f-c3f86f6bbc3e\") " pod="openstack-operators/openstack-operator-index-ttbdt"
Dec 04 15:19:34 crc kubenswrapper[4946]: I1204 15:19:34.457988 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xh8d\" (UniqueName: \"kubernetes.io/projected/06d05850-f87f-4944-be9f-c3f86f6bbc3e-kube-api-access-6xh8d\") pod \"openstack-operator-index-ttbdt\" (UID: \"06d05850-f87f-4944-be9f-c3f86f6bbc3e\") " pod="openstack-operators/openstack-operator-index-ttbdt"
Dec 04 15:19:34 crc kubenswrapper[4946]: I1204 15:19:34.599192 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-ttbdt"
Dec 04 15:19:35 crc kubenswrapper[4946]: I1204 15:19:35.178817 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-ttbdt"]
Dec 04 15:19:35 crc kubenswrapper[4946]: W1204 15:19:35.190030 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06d05850_f87f_4944_be9f_c3f86f6bbc3e.slice/crio-6a22a0e7fd8b4adb83c289d7ee7fe11d8d7ddbc346dbdb44ba2c6eab26bc6c89 WatchSource:0}: Error finding container 6a22a0e7fd8b4adb83c289d7ee7fe11d8d7ddbc346dbdb44ba2c6eab26bc6c89: Status 404 returned error can't find the container with id 6a22a0e7fd8b4adb83c289d7ee7fe11d8d7ddbc346dbdb44ba2c6eab26bc6c89
Dec 04 15:19:35 crc kubenswrapper[4946]: I1204 15:19:35.728091 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ttbdt" event={"ID":"06d05850-f87f-4944-be9f-c3f86f6bbc3e","Type":"ContainerStarted","Data":"6a22a0e7fd8b4adb83c289d7ee7fe11d8d7ddbc346dbdb44ba2c6eab26bc6c89"}
Dec 04 15:19:37 crc kubenswrapper[4946]: I1204 15:19:37.026046 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-9r7z7"
Dec 04 15:19:38 crc kubenswrapper[4946]: I1204 15:19:38.751362 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ttbdt" event={"ID":"06d05850-f87f-4944-be9f-c3f86f6bbc3e","Type":"ContainerStarted","Data":"e3afc8416629c19db125cc7dc2a87677c1855270112f37a4fa4f21fefee14363"}
Dec 04 15:19:38 crc kubenswrapper[4946]: I1204 15:19:38.770836 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-dkskn" event={"ID":"b16b34e5-fb44-4972-93f5-fcd0432266ff","Type":"ContainerStarted","Data":"62cd4edb47335b49b9aee4bdc0903ba85351ee9213acd4ae7ac9bc7fca6c3f8e"}
Dec 04 15:19:38 crc kubenswrapper[4946]: I1204 15:19:38.771147 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-dkskn" podUID="b16b34e5-fb44-4972-93f5-fcd0432266ff" containerName="registry-server" containerID="cri-o://62cd4edb47335b49b9aee4bdc0903ba85351ee9213acd4ae7ac9bc7fca6c3f8e" gracePeriod=2
Dec 04 15:19:38 crc kubenswrapper[4946]: I1204 15:19:38.782661 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-ttbdt" podStartSLOduration=2.031574168 podStartE2EDuration="4.782600192s" podCreationTimestamp="2025-12-04 15:19:34 +0000 UTC" firstStartedPulling="2025-12-04 15:19:35.191713976 +0000 UTC m=+1026.077757617" lastFinishedPulling="2025-12-04 15:19:37.94274 +0000 UTC m=+1028.828783641" observedRunningTime="2025-12-04 15:19:38.782433368 +0000 UTC m=+1029.668477029" watchObservedRunningTime="2025-12-04 15:19:38.782600192 +0000 UTC m=+1029.668643833"
Dec 04 15:19:38 crc kubenswrapper[4946]: I1204 15:19:38.807709 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-dkskn" podStartSLOduration=2.617935122 podStartE2EDuration="7.807670294s" podCreationTimestamp="2025-12-04 15:19:31 +0000 UTC" firstStartedPulling="2025-12-04 15:19:32.7049158 +0000 UTC m=+1023.590959441" lastFinishedPulling="2025-12-04 15:19:37.894650972 +0000 UTC m=+1028.780694613" observedRunningTime="2025-12-04 15:19:38.802603296 +0000 UTC m=+1029.688646937" watchObservedRunningTime="2025-12-04 15:19:38.807670294 +0000 UTC m=+1029.693713955"
Dec 04 15:19:39 crc kubenswrapper[4946]: I1204 15:19:39.161103 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-dkskn"
Dec 04 15:19:39 crc kubenswrapper[4946]: I1204 15:19:39.238022 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7777r\" (UniqueName: \"kubernetes.io/projected/b16b34e5-fb44-4972-93f5-fcd0432266ff-kube-api-access-7777r\") pod \"b16b34e5-fb44-4972-93f5-fcd0432266ff\" (UID: \"b16b34e5-fb44-4972-93f5-fcd0432266ff\") "
Dec 04 15:19:39 crc kubenswrapper[4946]: I1204 15:19:39.247290 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b16b34e5-fb44-4972-93f5-fcd0432266ff-kube-api-access-7777r" (OuterVolumeSpecName: "kube-api-access-7777r") pod "b16b34e5-fb44-4972-93f5-fcd0432266ff" (UID: "b16b34e5-fb44-4972-93f5-fcd0432266ff"). InnerVolumeSpecName "kube-api-access-7777r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:19:39 crc kubenswrapper[4946]: I1204 15:19:39.339984 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7777r\" (UniqueName: \"kubernetes.io/projected/b16b34e5-fb44-4972-93f5-fcd0432266ff-kube-api-access-7777r\") on node \"crc\" DevicePath \"\""
Dec 04 15:19:39 crc kubenswrapper[4946]: I1204 15:19:39.780193 4946 generic.go:334] "Generic (PLEG): container finished" podID="b16b34e5-fb44-4972-93f5-fcd0432266ff" containerID="62cd4edb47335b49b9aee4bdc0903ba85351ee9213acd4ae7ac9bc7fca6c3f8e" exitCode=0
Dec 04 15:19:39 crc kubenswrapper[4946]: I1204 15:19:39.780258 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-dkskn" event={"ID":"b16b34e5-fb44-4972-93f5-fcd0432266ff","Type":"ContainerDied","Data":"62cd4edb47335b49b9aee4bdc0903ba85351ee9213acd4ae7ac9bc7fca6c3f8e"}
Dec 04 15:19:39 crc kubenswrapper[4946]: I1204 15:19:39.781253 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-dkskn" event={"ID":"b16b34e5-fb44-4972-93f5-fcd0432266ff","Type":"ContainerDied","Data":"6760abe6b2082b6628e998f809f0cb99beda049e186900a6ad70fd794c5caac9"}
Dec 04 15:19:39 crc kubenswrapper[4946]: I1204 15:19:39.781289 4946 scope.go:117] "RemoveContainer" containerID="62cd4edb47335b49b9aee4bdc0903ba85351ee9213acd4ae7ac9bc7fca6c3f8e"
Dec 04 15:19:39 crc kubenswrapper[4946]: I1204 15:19:39.780290 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-dkskn"
Dec 04 15:19:39 crc kubenswrapper[4946]: I1204 15:19:39.800888 4946 scope.go:117] "RemoveContainer" containerID="62cd4edb47335b49b9aee4bdc0903ba85351ee9213acd4ae7ac9bc7fca6c3f8e"
Dec 04 15:19:39 crc kubenswrapper[4946]: E1204 15:19:39.801534 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62cd4edb47335b49b9aee4bdc0903ba85351ee9213acd4ae7ac9bc7fca6c3f8e\": container with ID starting with 62cd4edb47335b49b9aee4bdc0903ba85351ee9213acd4ae7ac9bc7fca6c3f8e not found: ID does not exist" containerID="62cd4edb47335b49b9aee4bdc0903ba85351ee9213acd4ae7ac9bc7fca6c3f8e"
Dec 04 15:19:39 crc kubenswrapper[4946]: I1204 15:19:39.801593 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62cd4edb47335b49b9aee4bdc0903ba85351ee9213acd4ae7ac9bc7fca6c3f8e"} err="failed to get container status \"62cd4edb47335b49b9aee4bdc0903ba85351ee9213acd4ae7ac9bc7fca6c3f8e\": rpc error: code = NotFound desc = could not find container \"62cd4edb47335b49b9aee4bdc0903ba85351ee9213acd4ae7ac9bc7fca6c3f8e\": container with ID starting with 62cd4edb47335b49b9aee4bdc0903ba85351ee9213acd4ae7ac9bc7fca6c3f8e not found: ID does not exist"
Dec 04 15:19:39 crc kubenswrapper[4946]: I1204 15:19:39.803479 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-dkskn"]
Dec 04 15:19:39 crc kubenswrapper[4946]: I1204 15:19:39.808801 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-dkskn"]
Dec 04 15:19:41 crc kubenswrapper[4946]: I1204 15:19:41.462519 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b16b34e5-fb44-4972-93f5-fcd0432266ff" path="/var/lib/kubelet/pods/b16b34e5-fb44-4972-93f5-fcd0432266ff/volumes"
Dec 04 15:19:44 crc kubenswrapper[4946]: I1204 15:19:44.599323 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-ttbdt"
Dec 04 15:19:44 crc kubenswrapper[4946]: I1204 15:19:44.599657 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-ttbdt"
Dec 04 15:19:44 crc kubenswrapper[4946]: I1204 15:19:44.629625 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-ttbdt"
Dec 04 15:19:44 crc kubenswrapper[4946]: I1204 15:19:44.850481 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-ttbdt"
Dec 04 15:19:51 crc kubenswrapper[4946]: I1204 15:19:51.047570 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s"]
Dec 04 15:19:51 crc kubenswrapper[4946]: E1204 15:19:51.048517 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b16b34e5-fb44-4972-93f5-fcd0432266ff" containerName="registry-server"
Dec 04 15:19:51 crc kubenswrapper[4946]: I1204 15:19:51.048533 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="b16b34e5-fb44-4972-93f5-fcd0432266ff" containerName="registry-server"
Dec 04 15:19:51 crc kubenswrapper[4946]: I1204 15:19:51.048675 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="b16b34e5-fb44-4972-93f5-fcd0432266ff" containerName="registry-server"
Dec 04 15:19:51 crc kubenswrapper[4946]: I1204 15:19:51.049679 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s"
Dec 04 15:19:51 crc kubenswrapper[4946]: I1204 15:19:51.052678 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-8hkss"
Dec 04 15:19:51 crc kubenswrapper[4946]: I1204 15:19:51.058519 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s"]
Dec 04 15:19:51 crc kubenswrapper[4946]: I1204 15:19:51.133165 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f8b2c834-77be-4c4c-90f9-ab83696108a8-util\") pod \"de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s\" (UID: \"f8b2c834-77be-4c4c-90f9-ab83696108a8\") " pod="openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s"
Dec 04 15:19:51 crc kubenswrapper[4946]: I1204 15:19:51.133245 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f8b2c834-77be-4c4c-90f9-ab83696108a8-bundle\") pod \"de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s\" (UID: \"f8b2c834-77be-4c4c-90f9-ab83696108a8\") " pod="openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s"
Dec 04 15:19:51 crc kubenswrapper[4946]: I1204 15:19:51.133520 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njfwb\" (UniqueName: \"kubernetes.io/projected/f8b2c834-77be-4c4c-90f9-ab83696108a8-kube-api-access-njfwb\") pod \"de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s\" (UID: \"f8b2c834-77be-4c4c-90f9-ab83696108a8\") " pod="openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s"
Dec 04 15:19:51 crc kubenswrapper[4946]: I1204 15:19:51.235266 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f8b2c834-77be-4c4c-90f9-ab83696108a8-util\") pod \"de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s\" (UID: \"f8b2c834-77be-4c4c-90f9-ab83696108a8\") " pod="openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s"
Dec 04 15:19:51 crc kubenswrapper[4946]: I1204 15:19:51.235349 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f8b2c834-77be-4c4c-90f9-ab83696108a8-bundle\") pod \"de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s\" (UID: \"f8b2c834-77be-4c4c-90f9-ab83696108a8\") " pod="openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s"
Dec 04 15:19:51 crc kubenswrapper[4946]: I1204 15:19:51.235392 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njfwb\" (UniqueName: \"kubernetes.io/projected/f8b2c834-77be-4c4c-90f9-ab83696108a8-kube-api-access-njfwb\") pod \"de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s\" (UID: \"f8b2c834-77be-4c4c-90f9-ab83696108a8\") " pod="openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s"
Dec 04 15:19:51 crc kubenswrapper[4946]: I1204 15:19:51.236001 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f8b2c834-77be-4c4c-90f9-ab83696108a8-bundle\") pod \"de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s\" (UID: \"f8b2c834-77be-4c4c-90f9-ab83696108a8\") " pod="openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s"
Dec 04 15:19:51 crc kubenswrapper[4946]: I1204 15:19:51.236294 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f8b2c834-77be-4c4c-90f9-ab83696108a8-util\") pod \"de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s\" (UID: \"f8b2c834-77be-4c4c-90f9-ab83696108a8\") " pod="openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s"
Dec 04 15:19:51 crc kubenswrapper[4946]: I1204 15:19:51.259989 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njfwb\" (UniqueName: \"kubernetes.io/projected/f8b2c834-77be-4c4c-90f9-ab83696108a8-kube-api-access-njfwb\") pod \"de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s\" (UID: \"f8b2c834-77be-4c4c-90f9-ab83696108a8\") " pod="openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s"
Dec 04 15:19:51 crc kubenswrapper[4946]: I1204 15:19:51.402237 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s"
Dec 04 15:19:51 crc kubenswrapper[4946]: I1204 15:19:51.970062 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s"]
Dec 04 15:19:52 crc kubenswrapper[4946]: I1204 15:19:52.486222 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 15:19:52 crc kubenswrapper[4946]: I1204 15:19:52.486622 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 15:19:52 crc kubenswrapper[4946]: I1204 15:19:52.486671 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-qhv79"
Dec 04 15:19:52 crc kubenswrapper[4946]: I1204 15:19:52.487454 4946 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"53eeb1a5a8af1654e1978db4066dd9d62d695280b47fdbadb0ee39d16803c85c"} pod="openshift-machine-config-operator/machine-config-daemon-qhv79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 04 15:19:52 crc kubenswrapper[4946]: I1204 15:19:52.487828 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" containerID="cri-o://53eeb1a5a8af1654e1978db4066dd9d62d695280b47fdbadb0ee39d16803c85c" gracePeriod=600
Dec 04 15:19:52 crc kubenswrapper[4946]: I1204 15:19:52.896199 4946 generic.go:334] "Generic (PLEG): container finished" podID="f8b2c834-77be-4c4c-90f9-ab83696108a8" containerID="f8467465f9325b9885694a7e246d093e9e34e549aeaddc8036c418cc596cfc7b" exitCode=0
podID="f8b2c834-77be-4c4c-90f9-ab83696108a8" containerID="f8467465f9325b9885694a7e246d093e9e34e549aeaddc8036c418cc596cfc7b" exitCode=0 Dec 04 15:19:52 crc kubenswrapper[4946]: I1204 15:19:52.896265 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s" event={"ID":"f8b2c834-77be-4c4c-90f9-ab83696108a8","Type":"ContainerDied","Data":"f8467465f9325b9885694a7e246d093e9e34e549aeaddc8036c418cc596cfc7b"} Dec 04 15:19:52 crc kubenswrapper[4946]: I1204 15:19:52.896606 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s" event={"ID":"f8b2c834-77be-4c4c-90f9-ab83696108a8","Type":"ContainerStarted","Data":"5eb8aa1b2d5abc10a1f63ec47a7bf32dac3e99ad1a26117572c696bce85edb71"} Dec 04 15:19:52 crc kubenswrapper[4946]: I1204 15:19:52.902014 4946 generic.go:334] "Generic (PLEG): container finished" podID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerID="53eeb1a5a8af1654e1978db4066dd9d62d695280b47fdbadb0ee39d16803c85c" exitCode=0 Dec 04 15:19:52 crc kubenswrapper[4946]: I1204 15:19:52.902054 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerDied","Data":"53eeb1a5a8af1654e1978db4066dd9d62d695280b47fdbadb0ee39d16803c85c"} Dec 04 15:19:52 crc kubenswrapper[4946]: I1204 15:19:52.902156 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"8a7e012c140a228f19f6cb14e9f9072b524033b54270efdccbdb4e5e1c52ef9d"} Dec 04 15:19:52 crc kubenswrapper[4946]: I1204 15:19:52.902179 4946 scope.go:117] "RemoveContainer" containerID="4a5d66e82f6e47d86ff02c63e947477c5c01ef6d0d42318658f454eca8014377" Dec 04 15:19:53 crc kubenswrapper[4946]: I1204 15:19:53.909979 4946 generic.go:334] "Generic (PLEG): container finished" podID="f8b2c834-77be-4c4c-90f9-ab83696108a8" containerID="53bfbbd4519742d2a21a0ca06f2648f91054612a09918b219117ae503a83945a" exitCode=0 Dec 04 15:19:53 crc kubenswrapper[4946]: I1204 15:19:53.910054 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s" event={"ID":"f8b2c834-77be-4c4c-90f9-ab83696108a8","Type":"ContainerDied","Data":"53bfbbd4519742d2a21a0ca06f2648f91054612a09918b219117ae503a83945a"} Dec 04 15:19:54 crc kubenswrapper[4946]: I1204 15:19:54.924796 4946 generic.go:334] "Generic (PLEG): container finished" podID="f8b2c834-77be-4c4c-90f9-ab83696108a8" containerID="e8af055a81f0ad38526eff6bb5f255f035c76f8dd2a0a78c62b60cd589da582c" exitCode=0 Dec 04 15:19:54 crc kubenswrapper[4946]: I1204 15:19:54.925051 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s" event={"ID":"f8b2c834-77be-4c4c-90f9-ab83696108a8","Type":"ContainerDied","Data":"e8af055a81f0ad38526eff6bb5f255f035c76f8dd2a0a78c62b60cd589da582c"} Dec 04 15:19:56 crc kubenswrapper[4946]: I1204 15:19:56.212859 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s" Dec 04 15:19:56 crc kubenswrapper[4946]: I1204 15:19:56.311376 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f8b2c834-77be-4c4c-90f9-ab83696108a8-bundle\") pod \"f8b2c834-77be-4c4c-90f9-ab83696108a8\" (UID: \"f8b2c834-77be-4c4c-90f9-ab83696108a8\") " Dec 04 15:19:56 crc kubenswrapper[4946]: I1204 15:19:56.311518 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njfwb\" (UniqueName: \"kubernetes.io/projected/f8b2c834-77be-4c4c-90f9-ab83696108a8-kube-api-access-njfwb\") pod \"f8b2c834-77be-4c4c-90f9-ab83696108a8\" (UID: \"f8b2c834-77be-4c4c-90f9-ab83696108a8\") " Dec 04 15:19:56 crc kubenswrapper[4946]: I1204 15:19:56.311553 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f8b2c834-77be-4c4c-90f9-ab83696108a8-util\") pod \"f8b2c834-77be-4c4c-90f9-ab83696108a8\" (UID: \"f8b2c834-77be-4c4c-90f9-ab83696108a8\") " Dec 04 15:19:56 crc kubenswrapper[4946]: I1204 15:19:56.313085 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8b2c834-77be-4c4c-90f9-ab83696108a8-bundle" (OuterVolumeSpecName: "bundle") pod "f8b2c834-77be-4c4c-90f9-ab83696108a8" (UID: "f8b2c834-77be-4c4c-90f9-ab83696108a8"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:19:56 crc kubenswrapper[4946]: I1204 15:19:56.319225 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8b2c834-77be-4c4c-90f9-ab83696108a8-kube-api-access-njfwb" (OuterVolumeSpecName: "kube-api-access-njfwb") pod "f8b2c834-77be-4c4c-90f9-ab83696108a8" (UID: "f8b2c834-77be-4c4c-90f9-ab83696108a8"). InnerVolumeSpecName "kube-api-access-njfwb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:19:56 crc kubenswrapper[4946]: I1204 15:19:56.326952 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8b2c834-77be-4c4c-90f9-ab83696108a8-util" (OuterVolumeSpecName: "util") pod "f8b2c834-77be-4c4c-90f9-ab83696108a8" (UID: "f8b2c834-77be-4c4c-90f9-ab83696108a8"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:19:56 crc kubenswrapper[4946]: I1204 15:19:56.414045 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njfwb\" (UniqueName: \"kubernetes.io/projected/f8b2c834-77be-4c4c-90f9-ab83696108a8-kube-api-access-njfwb\") on node \"crc\" DevicePath \"\"" Dec 04 15:19:56 crc kubenswrapper[4946]: I1204 15:19:56.414093 4946 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f8b2c834-77be-4c4c-90f9-ab83696108a8-util\") on node \"crc\" DevicePath \"\"" Dec 04 15:19:56 crc kubenswrapper[4946]: I1204 15:19:56.414105 4946 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f8b2c834-77be-4c4c-90f9-ab83696108a8-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:19:56 crc kubenswrapper[4946]: I1204 15:19:56.944426 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s" event={"ID":"f8b2c834-77be-4c4c-90f9-ab83696108a8","Type":"ContainerDied","Data":"5eb8aa1b2d5abc10a1f63ec47a7bf32dac3e99ad1a26117572c696bce85edb71"} Dec 04 15:19:56 crc kubenswrapper[4946]: I1204 15:19:56.944498 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5eb8aa1b2d5abc10a1f63ec47a7bf32dac3e99ad1a26117572c696bce85edb71" Dec 04 15:19:56 crc kubenswrapper[4946]: I1204 15:19:56.944500 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s" Dec 04 15:20:03 crc kubenswrapper[4946]: I1204 15:20:03.311855 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-769dd9b968-btnbt"] Dec 04 15:20:03 crc kubenswrapper[4946]: E1204 15:20:03.312831 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8b2c834-77be-4c4c-90f9-ab83696108a8" containerName="pull" Dec 04 15:20:03 crc kubenswrapper[4946]: I1204 15:20:03.312847 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8b2c834-77be-4c4c-90f9-ab83696108a8" containerName="pull" Dec 04 15:20:03 crc kubenswrapper[4946]: E1204 15:20:03.312862 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8b2c834-77be-4c4c-90f9-ab83696108a8" containerName="util" Dec 04 15:20:03 crc kubenswrapper[4946]: I1204 15:20:03.312868 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8b2c834-77be-4c4c-90f9-ab83696108a8" containerName="util" Dec 04 15:20:03 crc kubenswrapper[4946]: E1204 15:20:03.312878 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8b2c834-77be-4c4c-90f9-ab83696108a8" containerName="extract" Dec 04 15:20:03 crc kubenswrapper[4946]: I1204 15:20:03.312886 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8b2c834-77be-4c4c-90f9-ab83696108a8" containerName="extract" Dec 04 15:20:03 crc kubenswrapper[4946]: I1204 15:20:03.313026 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8b2c834-77be-4c4c-90f9-ab83696108a8" containerName="extract" Dec 04 15:20:03 crc kubenswrapper[4946]: I1204 15:20:03.313512 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-769dd9b968-btnbt" Dec 04 15:20:03 crc kubenswrapper[4946]: I1204 15:20:03.316780 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-9v8wx" Dec 04 15:20:03 crc kubenswrapper[4946]: I1204 15:20:03.406456 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-769dd9b968-btnbt"] Dec 04 15:20:03 crc kubenswrapper[4946]: I1204 15:20:03.419988 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chg45\" (UniqueName: \"kubernetes.io/projected/aa1fefcd-b28b-4ecf-9b92-e1fabe27cd26-kube-api-access-chg45\") pod \"openstack-operator-controller-operator-769dd9b968-btnbt\" (UID: \"aa1fefcd-b28b-4ecf-9b92-e1fabe27cd26\") " pod="openstack-operators/openstack-operator-controller-operator-769dd9b968-btnbt" Dec 04 15:20:03 crc kubenswrapper[4946]: I1204 15:20:03.522994 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chg45\" (UniqueName: \"kubernetes.io/projected/aa1fefcd-b28b-4ecf-9b92-e1fabe27cd26-kube-api-access-chg45\") pod \"openstack-operator-controller-operator-769dd9b968-btnbt\" (UID: \"aa1fefcd-b28b-4ecf-9b92-e1fabe27cd26\") " pod="openstack-operators/openstack-operator-controller-operator-769dd9b968-btnbt" Dec 04 15:20:03 crc kubenswrapper[4946]: I1204 15:20:03.548530 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chg45\" (UniqueName: \"kubernetes.io/projected/aa1fefcd-b28b-4ecf-9b92-e1fabe27cd26-kube-api-access-chg45\") pod \"openstack-operator-controller-operator-769dd9b968-btnbt\" (UID: \"aa1fefcd-b28b-4ecf-9b92-e1fabe27cd26\") " pod="openstack-operators/openstack-operator-controller-operator-769dd9b968-btnbt" Dec 04 15:20:03 crc kubenswrapper[4946]: I1204 15:20:03.633955 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-769dd9b968-btnbt" Dec 04 15:20:04 crc kubenswrapper[4946]: I1204 15:20:04.094538 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-769dd9b968-btnbt"] Dec 04 15:20:05 crc kubenswrapper[4946]: I1204 15:20:05.003840 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-769dd9b968-btnbt" event={"ID":"aa1fefcd-b28b-4ecf-9b92-e1fabe27cd26","Type":"ContainerStarted","Data":"985de53898c00318a56bac1f95438c380f0cd77b49720f778e7611b678fc17a7"} Dec 04 15:20:10 crc kubenswrapper[4946]: I1204 15:20:10.054285 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-769dd9b968-btnbt" event={"ID":"aa1fefcd-b28b-4ecf-9b92-e1fabe27cd26","Type":"ContainerStarted","Data":"9e17c571fe12ffb8f2b4fc195e73a5b424020fddc09f79f9e26507b6136fbf6e"} Dec 04 15:20:10 crc kubenswrapper[4946]: I1204 15:20:10.054915 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-769dd9b968-btnbt" Dec 04 15:20:10 crc kubenswrapper[4946]: I1204 15:20:10.083832 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-769dd9b968-btnbt" podStartSLOduration=1.665185323 podStartE2EDuration="7.083813381s" podCreationTimestamp="2025-12-04 15:20:03 +0000 UTC" firstStartedPulling="2025-12-04 15:20:04.112313265 +0000 UTC m=+1054.998356906" lastFinishedPulling="2025-12-04 15:20:09.530941323 +0000 UTC m=+1060.416984964" observedRunningTime="2025-12-04 15:20:10.082093005 +0000 UTC m=+1060.968136646" watchObservedRunningTime="2025-12-04 15:20:10.083813381 +0000 UTC m=+1060.969857022" Dec 04 15:20:23 crc kubenswrapper[4946]: I1204 15:20:23.637919 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-769dd9b968-btnbt" Dec 04 15:20:44 crc kubenswrapper[4946]: I1204 15:20:44.919226 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-hdr95"] Dec 04 15:20:44 crc kubenswrapper[4946]: I1204 15:20:44.921018 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-hdr95" Dec 04 15:20:44 crc kubenswrapper[4946]: I1204 15:20:44.922521 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5qzt\" (UniqueName: \"kubernetes.io/projected/6d911452-36e0-4227-9068-4ed0b86f025c-kube-api-access-p5qzt\") pod \"barbican-operator-controller-manager-7d9dfd778-hdr95\" (UID: \"6d911452-36e0-4227-9068-4ed0b86f025c\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-hdr95" Dec 04 15:20:44 crc kubenswrapper[4946]: I1204 15:20:44.923696 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-29dnk"] Dec 04 15:20:44 crc kubenswrapper[4946]: I1204 15:20:44.924363 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-l5jvh" Dec 04 15:20:44 crc kubenswrapper[4946]: I1204 15:20:44.924880 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-29dnk" Dec 04 15:20:44 crc kubenswrapper[4946]: I1204 15:20:44.928710 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-t5smt" Dec 04 15:20:44 crc kubenswrapper[4946]: I1204 15:20:44.947969 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-zdqwh"] Dec 04 15:20:44 crc kubenswrapper[4946]: I1204 15:20:44.949694 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-zdqwh" Dec 04 15:20:44 crc kubenswrapper[4946]: I1204 15:20:44.951423 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-qtp57" Dec 04 15:20:44 crc kubenswrapper[4946]: I1204 15:20:44.975461 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-zdqwh"] Dec 04 15:20:44 crc kubenswrapper[4946]: I1204 15:20:44.981381 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-hdr95"] Dec 04 15:20:44 crc kubenswrapper[4946]: I1204 15:20:44.994184 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-29dnk"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.000482 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-bd6fv"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.001500 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-bd6fv" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.008576 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-nrpbj" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.011759 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrwq2"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.013315 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrwq2" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.021465 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-fsn5z"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.022420 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-fl5xt" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.023037 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fsn5z" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.023778 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkqtb\" (UniqueName: \"kubernetes.io/projected/866cf896-d679-426b-80d9-de7a368958ed-kube-api-access-qkqtb\") pod \"horizon-operator-controller-manager-68c6d99b8f-rrwq2\" (UID: \"866cf896-d679-426b-80d9-de7a368958ed\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrwq2" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.023853 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltqgs\" (UniqueName: \"kubernetes.io/projected/76e27cbb-fdb9-447e-983f-48b7dbe8d46d-kube-api-access-ltqgs\") pod \"cinder-operator-controller-manager-859b6ccc6-29dnk\" (UID: \"76e27cbb-fdb9-447e-983f-48b7dbe8d46d\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-29dnk" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.023887 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r95x8\" (UniqueName: \"kubernetes.io/projected/a69ef7eb-6ffc-47cb-b7ee-7c46734d0857-kube-api-access-r95x8\") pod \"glance-operator-controller-manager-77987cd8cd-fsn5z\" (UID: \"a69ef7eb-6ffc-47cb-b7ee-7c46734d0857\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fsn5z" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.023925 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65qnq\" (UniqueName: \"kubernetes.io/projected/ae1dfef3-ccf2-4ac3-986e-77c23bddcdb5-kube-api-access-65qnq\") pod \"heat-operator-controller-manager-5f64f6f8bb-bd6fv\" (UID: \"ae1dfef3-ccf2-4ac3-986e-77c23bddcdb5\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-bd6fv" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.023958 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntxgw\" (UniqueName: \"kubernetes.io/projected/d4f2bb8c-1eac-4b12-bd9a-9c8ebad7d96f-kube-api-access-ntxgw\") pod \"designate-operator-controller-manager-78b4bc895b-zdqwh\" (UID: \"d4f2bb8c-1eac-4b12-bd9a-9c8ebad7d96f\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-zdqwh" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.024013 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5qzt\" (UniqueName: \"kubernetes.io/projected/6d911452-36e0-4227-9068-4ed0b86f025c-kube-api-access-p5qzt\") pod \"barbican-operator-controller-manager-7d9dfd778-hdr95\" (UID: \"6d911452-36e0-4227-9068-4ed0b86f025c\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-hdr95" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.035051 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-58l9f" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.042216 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-bd6fv"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.056194 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r"] Dec 
04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.057714 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.066905 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.067234 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-6rvkj" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.072306 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5qzt\" (UniqueName: \"kubernetes.io/projected/6d911452-36e0-4227-9068-4ed0b86f025c-kube-api-access-p5qzt\") pod \"barbican-operator-controller-manager-7d9dfd778-hdr95\" (UID: \"6d911452-36e0-4227-9068-4ed0b86f025c\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-hdr95" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.082613 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-fsn5z"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.103491 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrwq2"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.124943 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-bnspk"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.125312 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkqtb\" (UniqueName: \"kubernetes.io/projected/866cf896-d679-426b-80d9-de7a368958ed-kube-api-access-qkqtb\") pod \"horizon-operator-controller-manager-68c6d99b8f-rrwq2\" (UID: \"866cf896-d679-426b-80d9-de7a368958ed\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrwq2" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.125390 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltqgs\" (UniqueName: \"kubernetes.io/projected/76e27cbb-fdb9-447e-983f-48b7dbe8d46d-kube-api-access-ltqgs\") pod \"cinder-operator-controller-manager-859b6ccc6-29dnk\" (UID: \"76e27cbb-fdb9-447e-983f-48b7dbe8d46d\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-29dnk" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.125428 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r95x8\" (UniqueName: \"kubernetes.io/projected/a69ef7eb-6ffc-47cb-b7ee-7c46734d0857-kube-api-access-r95x8\") pod \"glance-operator-controller-manager-77987cd8cd-fsn5z\" (UID: \"a69ef7eb-6ffc-47cb-b7ee-7c46734d0857\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fsn5z" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.125463 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9rwr\" (UniqueName: \"kubernetes.io/projected/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-kube-api-access-w9rwr\") pod \"infra-operator-controller-manager-57548d458d-tpk4r\" (UID: \"421ad636-5eeb-4596-84c0-a0ca3cfbdef2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 
15:20:45.125499 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65qnq\" (UniqueName: \"kubernetes.io/projected/ae1dfef3-ccf2-4ac3-986e-77c23bddcdb5-kube-api-access-65qnq\") pod \"heat-operator-controller-manager-5f64f6f8bb-bd6fv\" (UID: \"ae1dfef3-ccf2-4ac3-986e-77c23bddcdb5\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-bd6fv" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.125530 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntxgw\" (UniqueName: \"kubernetes.io/projected/d4f2bb8c-1eac-4b12-bd9a-9c8ebad7d96f-kube-api-access-ntxgw\") pod \"designate-operator-controller-manager-78b4bc895b-zdqwh\" (UID: \"d4f2bb8c-1eac-4b12-bd9a-9c8ebad7d96f\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-zdqwh" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.125559 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-cert\") pod \"infra-operator-controller-manager-57548d458d-tpk4r\" (UID: \"421ad636-5eeb-4596-84c0-a0ca3cfbdef2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.126312 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bnspk" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.132501 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-wf57c" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.161288 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-bnspk"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.164407 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkqtb\" (UniqueName: \"kubernetes.io/projected/866cf896-d679-426b-80d9-de7a368958ed-kube-api-access-qkqtb\") pod \"horizon-operator-controller-manager-68c6d99b8f-rrwq2\" (UID: \"866cf896-d679-426b-80d9-de7a368958ed\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrwq2" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.165183 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntxgw\" (UniqueName: \"kubernetes.io/projected/d4f2bb8c-1eac-4b12-bd9a-9c8ebad7d96f-kube-api-access-ntxgw\") pod \"designate-operator-controller-manager-78b4bc895b-zdqwh\" (UID: \"d4f2bb8c-1eac-4b12-bd9a-9c8ebad7d96f\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-zdqwh" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.171603 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.181051 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r95x8\" (UniqueName: \"kubernetes.io/projected/a69ef7eb-6ffc-47cb-b7ee-7c46734d0857-kube-api-access-r95x8\") pod \"glance-operator-controller-manager-77987cd8cd-fsn5z\" (UID: \"a69ef7eb-6ffc-47cb-b7ee-7c46734d0857\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fsn5z" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.192924 
4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65qnq\" (UniqueName: \"kubernetes.io/projected/ae1dfef3-ccf2-4ac3-986e-77c23bddcdb5-kube-api-access-65qnq\") pod \"heat-operator-controller-manager-5f64f6f8bb-bd6fv\" (UID: \"ae1dfef3-ccf2-4ac3-986e-77c23bddcdb5\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-bd6fv" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.197177 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltqgs\" (UniqueName: \"kubernetes.io/projected/76e27cbb-fdb9-447e-983f-48b7dbe8d46d-kube-api-access-ltqgs\") pod \"cinder-operator-controller-manager-859b6ccc6-29dnk\" (UID: \"76e27cbb-fdb9-447e-983f-48b7dbe8d46d\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-29dnk" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.203466 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-8nbch"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.205401 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8nbch" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.219529 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-rhd7t" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.219825 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-8nbch"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.226835 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9rwr\" (UniqueName: \"kubernetes.io/projected/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-kube-api-access-w9rwr\") pod \"infra-operator-controller-manager-57548d458d-tpk4r\" (UID: \"421ad636-5eeb-4596-84c0-a0ca3cfbdef2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.226932 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-cert\") pod \"infra-operator-controller-manager-57548d458d-tpk4r\" (UID: \"421ad636-5eeb-4596-84c0-a0ca3cfbdef2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" Dec 04 15:20:45 crc kubenswrapper[4946]: E1204 15:20:45.227138 4946 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 04 15:20:45 crc kubenswrapper[4946]: E1204 15:20:45.227216 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-cert podName:421ad636-5eeb-4596-84c0-a0ca3cfbdef2 nodeName:}" failed. No retries permitted until 2025-12-04 15:20:45.727188917 +0000 UTC m=+1096.613232558 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-cert") pod "infra-operator-controller-manager-57548d458d-tpk4r" (UID: "421ad636-5eeb-4596-84c0-a0ca3cfbdef2") : secret "infra-operator-webhook-server-cert" not found Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.246043 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-hdr95" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.260567 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-29dnk" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.281421 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-zdqwh" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.295013 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9rwr\" (UniqueName: \"kubernetes.io/projected/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-kube-api-access-w9rwr\") pod \"infra-operator-controller-manager-57548d458d-tpk4r\" (UID: \"421ad636-5eeb-4596-84c0-a0ca3cfbdef2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.295142 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-j8r75"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.296542 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-j8r75" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.304914 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-khbmw" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.352763 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-j8r75"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.358663 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrwq2" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.360041 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-bd6fv" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.363409 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9g4l\" (UniqueName: \"kubernetes.io/projected/965366ad-4bb5-424a-9cf0-d09c42dec244-kube-api-access-c9g4l\") pod \"keystone-operator-controller-manager-7765d96ddf-8nbch\" (UID: \"965366ad-4bb5-424a-9cf0-d09c42dec244\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8nbch" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.363628 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fqrq\" (UniqueName: \"kubernetes.io/projected/9584ac77-41db-4621-a720-88b7c107ffa2-kube-api-access-8fqrq\") pod \"ironic-operator-controller-manager-6c548fd776-bnspk\" (UID: \"9584ac77-41db-4621-a720-88b7c107ffa2\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bnspk" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.403945 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fsn5z" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.447337 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2wkbw"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.449231 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2wkbw" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.454049 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-4qrq4" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.465731 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9g4l\" (UniqueName: \"kubernetes.io/projected/965366ad-4bb5-424a-9cf0-d09c42dec244-kube-api-access-c9g4l\") pod \"keystone-operator-controller-manager-7765d96ddf-8nbch\" (UID: \"965366ad-4bb5-424a-9cf0-d09c42dec244\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8nbch" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.466278 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fqrq\" (UniqueName: \"kubernetes.io/projected/9584ac77-41db-4621-a720-88b7c107ffa2-kube-api-access-8fqrq\") pod \"ironic-operator-controller-manager-6c548fd776-bnspk\" (UID: \"9584ac77-41db-4621-a720-88b7c107ffa2\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bnspk" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.466329 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbs76\" (UniqueName: \"kubernetes.io/projected/52d7003e-8315-49b6-b086-f0655f555960-kube-api-access-kbs76\") pod \"manila-operator-controller-manager-7c79b5df47-j8r75\" (UID: \"52d7003e-8315-49b6-b086-f0655f555960\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-j8r75" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.501446 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fqrq\" (UniqueName: \"kubernetes.io/projected/9584ac77-41db-4621-a720-88b7c107ffa2-kube-api-access-8fqrq\") pod \"ironic-operator-controller-manager-6c548fd776-bnspk\" (UID: \"9584ac77-41db-4621-a720-88b7c107ffa2\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bnspk" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.532616 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9g4l\" (UniqueName: \"kubernetes.io/projected/965366ad-4bb5-424a-9cf0-d09c42dec244-kube-api-access-c9g4l\") pod \"keystone-operator-controller-manager-7765d96ddf-8nbch\" (UID: \"965366ad-4bb5-424a-9cf0-d09c42dec244\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8nbch" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.568222 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp56z\" (UniqueName: \"kubernetes.io/projected/db2d87e7-4cf3-4d0d-b77e-2d02a073872c-kube-api-access-rp56z\") pod \"mariadb-operator-controller-manager-56bbcc9d85-2wkbw\" (UID: \"db2d87e7-4cf3-4d0d-b77e-2d02a073872c\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2wkbw" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 
15:20:45.568298 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbs76\" (UniqueName: \"kubernetes.io/projected/52d7003e-8315-49b6-b086-f0655f555960-kube-api-access-kbs76\") pod \"manila-operator-controller-manager-7c79b5df47-j8r75\" (UID: \"52d7003e-8315-49b6-b086-f0655f555960\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-j8r75" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.578576 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-ptn6j"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.580413 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-ptn6j" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.586141 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-bqj8s" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.592968 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbs76\" (UniqueName: \"kubernetes.io/projected/52d7003e-8315-49b6-b086-f0655f555960-kube-api-access-kbs76\") pod \"manila-operator-controller-manager-7c79b5df47-j8r75\" (UID: \"52d7003e-8315-49b6-b086-f0655f555960\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-j8r75" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.602790 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8nbch" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.641907 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2wkbw"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.669582 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvzg7\" (UniqueName: \"kubernetes.io/projected/41b063f1-7646-49dc-85e4-9e7185220de1-kube-api-access-qvzg7\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-ptn6j\" (UID: \"41b063f1-7646-49dc-85e4-9e7185220de1\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-ptn6j" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.669668 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp56z\" (UniqueName: \"kubernetes.io/projected/db2d87e7-4cf3-4d0d-b77e-2d02a073872c-kube-api-access-rp56z\") pod \"mariadb-operator-controller-manager-56bbcc9d85-2wkbw\" (UID: \"db2d87e7-4cf3-4d0d-b77e-2d02a073872c\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2wkbw" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.684791 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-j8r75" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.688303 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-ptn6j"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.690886 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp56z\" (UniqueName: \"kubernetes.io/projected/db2d87e7-4cf3-4d0d-b77e-2d02a073872c-kube-api-access-rp56z\") pod \"mariadb-operator-controller-manager-56bbcc9d85-2wkbw\" (UID: \"db2d87e7-4cf3-4d0d-b77e-2d02a073872c\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2wkbw" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.697886 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-vzpjw"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.699616 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-vzpjw" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.702657 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-4f768" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.741963 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-vzpjw"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.757856 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-77ss8"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.759657 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-77ss8" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.764521 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bnspk" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.765439 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-b9x6p" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.770417 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-77ss8"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.772338 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-cert\") pod \"infra-operator-controller-manager-57548d458d-tpk4r\" (UID: \"421ad636-5eeb-4596-84c0-a0ca3cfbdef2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.772417 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvzg7\" (UniqueName: \"kubernetes.io/projected/41b063f1-7646-49dc-85e4-9e7185220de1-kube-api-access-qvzg7\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-ptn6j\" (UID: \"41b063f1-7646-49dc-85e4-9e7185220de1\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-ptn6j" Dec 04 15:20:45 crc kubenswrapper[4946]: E1204 15:20:45.773088 4946 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 04 15:20:45 crc kubenswrapper[4946]: E1204 15:20:45.773159 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-cert podName:421ad636-5eeb-4596-84c0-a0ca3cfbdef2 nodeName:}" failed. No retries permitted until 2025-12-04 15:20:46.773141011 +0000 UTC m=+1097.659184652 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-cert") pod "infra-operator-controller-manager-57548d458d-tpk4r" (UID: "421ad636-5eeb-4596-84c0-a0ca3cfbdef2") : secret "infra-operator-webhook-server-cert" not found Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.786968 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.793487 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.795856 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.803285 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-rmm7n" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.806434 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.818904 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvzg7\" (UniqueName: \"kubernetes.io/projected/41b063f1-7646-49dc-85e4-9e7185220de1-kube-api-access-qvzg7\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-ptn6j\" (UID: \"41b063f1-7646-49dc-85e4-9e7185220de1\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-ptn6j" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.818996 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-tp7zf"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.820765 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-tp7zf" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.824224 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-hvrzs" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.826661 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-tp7zf"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.833215 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2wkbw" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.838281 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-rdrpr"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.840000 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-rdrpr" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.844137 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-gx9tk" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.848008 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-rdrpr"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.855896 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-25vwl"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.862309 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-25vwl" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.874489 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4frldw\" (UID: \"e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.874554 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pmc2\" (UniqueName: \"kubernetes.io/projected/10b2d29b-4444-4dfe-ad8f-ad913798df88-kube-api-access-6pmc2\") pod \"nova-operator-controller-manager-697bc559fc-vzpjw\" (UID: \"10b2d29b-4444-4dfe-ad8f-ad913798df88\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-vzpjw" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.874627 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhgsl\" (UniqueName: \"kubernetes.io/projected/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-kube-api-access-jhgsl\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4frldw\" (UID: \"e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.874652 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gt7px\" (UniqueName: \"kubernetes.io/projected/262aaccf-cdc8-44b6-8fc6-8702491cfad8-kube-api-access-gt7px\") pod \"octavia-operator-controller-manager-998648c74-77ss8\" (UID: \"262aaccf-cdc8-44b6-8fc6-8702491cfad8\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-77ss8" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.875187 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-25vwl"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.877799 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-t7krh" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.913232 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5d9cf8555c-csjn7"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.915039 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5d9cf8555c-csjn7" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.920169 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-4r5hb" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.929913 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-ptn6j" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.951528 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-bqtnh"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.953420 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bqtnh" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.959953 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-7rrzl" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.961220 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5d9cf8555c-csjn7"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.971676 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-bqtnh"] Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.975572 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4frldw\" (UID: \"e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.975633 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pmc2\" (UniqueName: \"kubernetes.io/projected/10b2d29b-4444-4dfe-ad8f-ad913798df88-kube-api-access-6pmc2\") pod \"nova-operator-controller-manager-697bc559fc-vzpjw\" (UID: \"10b2d29b-4444-4dfe-ad8f-ad913798df88\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-vzpjw" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.975709 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhgsl\" (UniqueName: \"kubernetes.io/projected/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-kube-api-access-jhgsl\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4frldw\" (UID: \"e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.975728 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gt7px\" (UniqueName: \"kubernetes.io/projected/262aaccf-cdc8-44b6-8fc6-8702491cfad8-kube-api-access-gt7px\") pod \"octavia-operator-controller-manager-998648c74-77ss8\" (UID: \"262aaccf-cdc8-44b6-8fc6-8702491cfad8\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-77ss8" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.975775 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwrlm\" (UniqueName: \"kubernetes.io/projected/4439c79c-3951-4b61-98ad-86f417432fde-kube-api-access-bwrlm\") pod \"ovn-operator-controller-manager-b6456fdb6-tp7zf\" (UID: \"4439c79c-3951-4b61-98ad-86f417432fde\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-tp7zf" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.975825 4946 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jf4hr\" (UniqueName: \"kubernetes.io/projected/b7a5eb4e-a8b8-43e5-95cf-51f40d454d79-kube-api-access-jf4hr\") pod \"swift-operator-controller-manager-5f8c65bbfc-25vwl\" (UID: \"b7a5eb4e-a8b8-43e5-95cf-51f40d454d79\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-25vwl" Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.975854 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jntt9\" (UniqueName: \"kubernetes.io/projected/110a7ea7-4b02-4f5d-be16-87c4f0090eec-kube-api-access-jntt9\") pod \"placement-operator-controller-manager-78f8948974-rdrpr\" (UID: \"110a7ea7-4b02-4f5d-be16-87c4f0090eec\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-rdrpr" Dec 04 15:20:45 crc kubenswrapper[4946]: E1204 15:20:45.976009 4946 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 15:20:45 crc kubenswrapper[4946]: E1204 15:20:45.976064 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-cert podName:e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6 nodeName:}" failed. No retries permitted until 2025-12-04 15:20:46.476044425 +0000 UTC m=+1097.362088066 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" (UID: "e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 15:20:45 crc kubenswrapper[4946]: I1204 15:20:45.999354 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-f6jlm"] Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.002292 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-f6jlm" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.006248 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-ntjqc" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.017382 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-f6jlm"] Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.020504 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pmc2\" (UniqueName: \"kubernetes.io/projected/10b2d29b-4444-4dfe-ad8f-ad913798df88-kube-api-access-6pmc2\") pod \"nova-operator-controller-manager-697bc559fc-vzpjw\" (UID: \"10b2d29b-4444-4dfe-ad8f-ad913798df88\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-vzpjw" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.023608 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhgsl\" (UniqueName: \"kubernetes.io/projected/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-kube-api-access-jhgsl\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4frldw\" (UID: \"e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.028828 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gt7px\" (UniqueName: \"kubernetes.io/projected/262aaccf-cdc8-44b6-8fc6-8702491cfad8-kube-api-access-gt7px\") pod \"octavia-operator-controller-manager-998648c74-77ss8\" (UID: \"262aaccf-cdc8-44b6-8fc6-8702491cfad8\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-77ss8" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.042427 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq"] Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.043853 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.050683 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.051063 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-2lkdn" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.051233 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.055516 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-vzpjw" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.068757 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq"] Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.078349 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gv52d\" (UniqueName: \"kubernetes.io/projected/c92477ee-92e6-4dca-af5d-9b0f44bcaf60-kube-api-access-gv52d\") pod \"test-operator-controller-manager-5854674fcc-bqtnh\" (UID: \"c92477ee-92e6-4dca-af5d-9b0f44bcaf60\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-bqtnh" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.078674 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwrlm\" (UniqueName: \"kubernetes.io/projected/4439c79c-3951-4b61-98ad-86f417432fde-kube-api-access-bwrlm\") pod \"ovn-operator-controller-manager-b6456fdb6-tp7zf\" (UID: \"4439c79c-3951-4b61-98ad-86f417432fde\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-tp7zf" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.079130 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jf4hr\" (UniqueName: \"kubernetes.io/projected/b7a5eb4e-a8b8-43e5-95cf-51f40d454d79-kube-api-access-jf4hr\") pod \"swift-operator-controller-manager-5f8c65bbfc-25vwl\" (UID: \"b7a5eb4e-a8b8-43e5-95cf-51f40d454d79\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-25vwl" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.079265 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jntt9\" (UniqueName: \"kubernetes.io/projected/110a7ea7-4b02-4f5d-be16-87c4f0090eec-kube-api-access-jntt9\") pod \"placement-operator-controller-manager-78f8948974-rdrpr\" (UID: \"110a7ea7-4b02-4f5d-be16-87c4f0090eec\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-rdrpr" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.079429 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qp8g5\" (UniqueName: \"kubernetes.io/projected/c9933077-41f3-425f-b478-c53691b7d817-kube-api-access-qp8g5\") pod \"telemetry-operator-controller-manager-5d9cf8555c-csjn7\" (UID: \"c9933077-41f3-425f-b478-c53691b7d817\") " pod="openstack-operators/telemetry-operator-controller-manager-5d9cf8555c-csjn7" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.095702 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-77ss8" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.105392 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwrlm\" (UniqueName: \"kubernetes.io/projected/4439c79c-3951-4b61-98ad-86f417432fde-kube-api-access-bwrlm\") pod \"ovn-operator-controller-manager-b6456fdb6-tp7zf\" (UID: \"4439c79c-3951-4b61-98ad-86f417432fde\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-tp7zf" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.106078 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jntt9\" (UniqueName: \"kubernetes.io/projected/110a7ea7-4b02-4f5d-be16-87c4f0090eec-kube-api-access-jntt9\") pod \"placement-operator-controller-manager-78f8948974-rdrpr\" (UID: \"110a7ea7-4b02-4f5d-be16-87c4f0090eec\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-rdrpr" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.171911 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jf4hr\" (UniqueName: \"kubernetes.io/projected/b7a5eb4e-a8b8-43e5-95cf-51f40d454d79-kube-api-access-jf4hr\") pod \"swift-operator-controller-manager-5f8c65bbfc-25vwl\" (UID: \"b7a5eb4e-a8b8-43e5-95cf-51f40d454d79\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-25vwl" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.179297 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ffkxj"] Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.183333 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ffkxj" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.187578 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qp8g5\" (UniqueName: \"kubernetes.io/projected/c9933077-41f3-425f-b478-c53691b7d817-kube-api-access-qp8g5\") pod \"telemetry-operator-controller-manager-5d9cf8555c-csjn7\" (UID: \"c9933077-41f3-425f-b478-c53691b7d817\") " pod="openstack-operators/telemetry-operator-controller-manager-5d9cf8555c-csjn7" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.187706 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-webhook-certs\") pod \"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.187766 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpjmk\" (UniqueName: \"kubernetes.io/projected/6a246ded-a3c1-42c5-a6a7-648dec93f77f-kube-api-access-mpjmk\") pod \"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.187841 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-metrics-certs\") pod 
\"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.187868 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gv52d\" (UniqueName: \"kubernetes.io/projected/c92477ee-92e6-4dca-af5d-9b0f44bcaf60-kube-api-access-gv52d\") pod \"test-operator-controller-manager-5854674fcc-bqtnh\" (UID: \"c92477ee-92e6-4dca-af5d-9b0f44bcaf60\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-bqtnh" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.187907 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-947w4\" (UniqueName: \"kubernetes.io/projected/fa8a1267-46f8-4554-8a91-7389be265abd-kube-api-access-947w4\") pod \"watcher-operator-controller-manager-769dc69bc-f6jlm\" (UID: \"fa8a1267-46f8-4554-8a91-7389be265abd\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-f6jlm" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.205462 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-tp7zf" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.206692 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-rdrpr" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.214565 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ffkxj"] Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.215263 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-nrh9j" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.235421 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-25vwl" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.245879 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qp8g5\" (UniqueName: \"kubernetes.io/projected/c9933077-41f3-425f-b478-c53691b7d817-kube-api-access-qp8g5\") pod \"telemetry-operator-controller-manager-5d9cf8555c-csjn7\" (UID: \"c9933077-41f3-425f-b478-c53691b7d817\") " pod="openstack-operators/telemetry-operator-controller-manager-5d9cf8555c-csjn7" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.248851 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gv52d\" (UniqueName: \"kubernetes.io/projected/c92477ee-92e6-4dca-af5d-9b0f44bcaf60-kube-api-access-gv52d\") pod \"test-operator-controller-manager-5854674fcc-bqtnh\" (UID: \"c92477ee-92e6-4dca-af5d-9b0f44bcaf60\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-bqtnh" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.289557 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-webhook-certs\") pod \"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.289629 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpjmk\" (UniqueName: \"kubernetes.io/projected/6a246ded-a3c1-42c5-a6a7-648dec93f77f-kube-api-access-mpjmk\") pod \"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.289674 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7h64\" (UniqueName: \"kubernetes.io/projected/57d9b742-9429-43c6-8798-6813c321866f-kube-api-access-r7h64\") pod \"rabbitmq-cluster-operator-manager-668c99d594-ffkxj\" (UID: \"57d9b742-9429-43c6-8798-6813c321866f\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ffkxj" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.289711 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-metrics-certs\") pod \"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.289753 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-947w4\" (UniqueName: \"kubernetes.io/projected/fa8a1267-46f8-4554-8a91-7389be265abd-kube-api-access-947w4\") pod \"watcher-operator-controller-manager-769dc69bc-f6jlm\" (UID: \"fa8a1267-46f8-4554-8a91-7389be265abd\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-f6jlm" Dec 04 15:20:46 crc kubenswrapper[4946]: E1204 15:20:46.290238 4946 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 04 15:20:46 crc kubenswrapper[4946]: E1204 15:20:46.290451 4946 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-webhook-certs podName:6a246ded-a3c1-42c5-a6a7-648dec93f77f nodeName:}" failed. No retries permitted until 2025-12-04 15:20:46.790405381 +0000 UTC m=+1097.676449072 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-webhook-certs") pod "openstack-operator-controller-manager-7b58c9d549-7lmqq" (UID: "6a246ded-a3c1-42c5-a6a7-648dec93f77f") : secret "webhook-server-cert" not found Dec 04 15:20:46 crc kubenswrapper[4946]: E1204 15:20:46.290305 4946 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 04 15:20:46 crc kubenswrapper[4946]: E1204 15:20:46.290975 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-metrics-certs podName:6a246ded-a3c1-42c5-a6a7-648dec93f77f nodeName:}" failed. No retries permitted until 2025-12-04 15:20:46.790963035 +0000 UTC m=+1097.677006766 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-metrics-certs") pod "openstack-operator-controller-manager-7b58c9d549-7lmqq" (UID: "6a246ded-a3c1-42c5-a6a7-648dec93f77f") : secret "metrics-server-cert" not found Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.300169 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-29dnk"] Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.310560 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpjmk\" (UniqueName: \"kubernetes.io/projected/6a246ded-a3c1-42c5-a6a7-648dec93f77f-kube-api-access-mpjmk\") pod \"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.313619 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-947w4\" (UniqueName: \"kubernetes.io/projected/fa8a1267-46f8-4554-8a91-7389be265abd-kube-api-access-947w4\") pod \"watcher-operator-controller-manager-769dc69bc-f6jlm\" (UID: \"fa8a1267-46f8-4554-8a91-7389be265abd\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-f6jlm" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.369479 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-hdr95"] Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.391719 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7h64\" (UniqueName: \"kubernetes.io/projected/57d9b742-9429-43c6-8798-6813c321866f-kube-api-access-r7h64\") pod \"rabbitmq-cluster-operator-manager-668c99d594-ffkxj\" (UID: \"57d9b742-9429-43c6-8798-6813c321866f\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ffkxj" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.428709 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7h64\" (UniqueName: \"kubernetes.io/projected/57d9b742-9429-43c6-8798-6813c321866f-kube-api-access-r7h64\") pod \"rabbitmq-cluster-operator-manager-668c99d594-ffkxj\" (UID: 
\"57d9b742-9429-43c6-8798-6813c321866f\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ffkxj" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.431309 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5d9cf8555c-csjn7" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.454221 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bqtnh" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.471998 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-hdr95" event={"ID":"6d911452-36e0-4227-9068-4ed0b86f025c","Type":"ContainerStarted","Data":"12b0f9306875dc691e11945047cbdd16c66c3b5273008cb00d7dc309be582f2a"} Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.476284 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-f6jlm" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.494935 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4frldw\" (UID: \"e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" Dec 04 15:20:46 crc kubenswrapper[4946]: E1204 15:20:46.496958 4946 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 15:20:46 crc kubenswrapper[4946]: E1204 15:20:46.497622 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-cert podName:e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6 nodeName:}" failed. No retries permitted until 2025-12-04 15:20:47.497599729 +0000 UTC m=+1098.383643370 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" (UID: "e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.502841 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-29dnk" event={"ID":"76e27cbb-fdb9-447e-983f-48b7dbe8d46d","Type":"ContainerStarted","Data":"15cd41a53d57ec45ed896086462e5beb9e08dfedde951be63b5b93399b0700b4"} Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.567054 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ffkxj" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.771790 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-zdqwh"] Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.817297 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-cert\") pod \"infra-operator-controller-manager-57548d458d-tpk4r\" (UID: \"421ad636-5eeb-4596-84c0-a0ca3cfbdef2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.817419 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-webhook-certs\") pod \"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:20:46 crc kubenswrapper[4946]: I1204 15:20:46.817514 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-metrics-certs\") pod \"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:20:46 crc kubenswrapper[4946]: E1204 15:20:46.817713 4946 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 04 15:20:46 crc kubenswrapper[4946]: E1204 15:20:46.817789 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-metrics-certs podName:6a246ded-a3c1-42c5-a6a7-648dec93f77f nodeName:}" failed. No retries permitted until 2025-12-04 15:20:47.81776921 +0000 UTC m=+1098.703812851 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-metrics-certs") pod "openstack-operator-controller-manager-7b58c9d549-7lmqq" (UID: "6a246ded-a3c1-42c5-a6a7-648dec93f77f") : secret "metrics-server-cert" not found Dec 04 15:20:46 crc kubenswrapper[4946]: E1204 15:20:46.818299 4946 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 04 15:20:46 crc kubenswrapper[4946]: E1204 15:20:46.818448 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-webhook-certs podName:6a246ded-a3c1-42c5-a6a7-648dec93f77f nodeName:}" failed. No retries permitted until 2025-12-04 15:20:47.818414167 +0000 UTC m=+1098.704457808 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-webhook-certs") pod "openstack-operator-controller-manager-7b58c9d549-7lmqq" (UID: "6a246ded-a3c1-42c5-a6a7-648dec93f77f") : secret "webhook-server-cert" not found Dec 04 15:20:46 crc kubenswrapper[4946]: E1204 15:20:46.818314 4946 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 04 15:20:46 crc kubenswrapper[4946]: E1204 15:20:46.818522 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-cert podName:421ad636-5eeb-4596-84c0-a0ca3cfbdef2 nodeName:}" failed. No retries permitted until 2025-12-04 15:20:48.81851295 +0000 UTC m=+1099.704556811 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-cert") pod "infra-operator-controller-manager-57548d458d-tpk4r" (UID: "421ad636-5eeb-4596-84c0-a0ca3cfbdef2") : secret "infra-operator-webhook-server-cert" not found Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.239419 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-bd6fv"] Dec 04 15:20:47 crc kubenswrapper[4946]: W1204 15:20:47.243608 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podae1dfef3_ccf2_4ac3_986e_77c23bddcdb5.slice/crio-841c38670d5515f43329482625225abb6639b58d7c3607bd9672246ff295cd77 WatchSource:0}: Error finding container 841c38670d5515f43329482625225abb6639b58d7c3607bd9672246ff295cd77: Status 404 returned error can't find the container with id 841c38670d5515f43329482625225abb6639b58d7c3607bd9672246ff295cd77 Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.288140 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2wkbw"] Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.298790 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-fsn5z"] Dec 04 15:20:47 crc kubenswrapper[4946]: W1204 15:20:47.333073 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9584ac77_41db_4621_a720_88b7c107ffa2.slice/crio-406302f13e6b896138801770e4ce0d455a2bfb699512d0893ebba7f558a1bb92 WatchSource:0}: Error finding container 406302f13e6b896138801770e4ce0d455a2bfb699512d0893ebba7f558a1bb92: Status 404 returned error can't find the container with id 406302f13e6b896138801770e4ce0d455a2bfb699512d0893ebba7f558a1bb92 Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.348452 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-bnspk"] Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.380056 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-j8r75"] Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.392281 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-vzpjw"] Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.402269 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrwq2"] Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.408050 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-8nbch"] Dec 04 15:20:47 crc kubenswrapper[4946]: W1204 15:20:47.415552 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod41b063f1_7646_49dc_85e4_9e7185220de1.slice/crio-a0f0a903e3dd671bc13d8f983306a3b9546803f76d185e300f335033a4b454f1 WatchSource:0}: Error finding container a0f0a903e3dd671bc13d8f983306a3b9546803f76d185e300f335033a4b454f1: Status 404 returned error can't find the container with id a0f0a903e3dd671bc13d8f983306a3b9546803f76d185e300f335033a4b454f1 Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.418845 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-77ss8"] Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.424697 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-ptn6j"] Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.512347 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2wkbw" event={"ID":"db2d87e7-4cf3-4d0d-b77e-2d02a073872c","Type":"ContainerStarted","Data":"6f496abef81f1caefcd82114ae1fda05b034c1f87c4d28a27d299d284443a2c2"} Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.513672 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-zdqwh" event={"ID":"d4f2bb8c-1eac-4b12-bd9a-9c8ebad7d96f","Type":"ContainerStarted","Data":"0add19410e17571e501c4ccd9a1e9a8d4e709da7bedbc6da467625fd5bc2c775"} Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.515195 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-bd6fv" event={"ID":"ae1dfef3-ccf2-4ac3-986e-77c23bddcdb5","Type":"ContainerStarted","Data":"841c38670d5515f43329482625225abb6639b58d7c3607bd9672246ff295cd77"} Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.516701 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-j8r75" event={"ID":"52d7003e-8315-49b6-b086-f0655f555960","Type":"ContainerStarted","Data":"ac01a85eaf97b0cee60d87b301d1df64fd5b985941ac806e49996b6c6432939f"} Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.518587 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-77ss8" event={"ID":"262aaccf-cdc8-44b6-8fc6-8702491cfad8","Type":"ContainerStarted","Data":"4977bbab5547e2c28b2ec9a4a9640f2c1db966f1773018e333ca1fd05d5f9acf"} Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.519894 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-vzpjw" event={"ID":"10b2d29b-4444-4dfe-ad8f-ad913798df88","Type":"ContainerStarted","Data":"b10c6088f0180d9dc9fc529d6c3ad7b02c28b4fe3c89527ca6c3d806dc05a6c8"} Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.521162 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fsn5z" 
event={"ID":"a69ef7eb-6ffc-47cb-b7ee-7c46734d0857","Type":"ContainerStarted","Data":"bb014b0f8556472be320f36cb4ea421e1b2026c4d2da663fd4edb84d72f914c9"} Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.522765 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8nbch" event={"ID":"965366ad-4bb5-424a-9cf0-d09c42dec244","Type":"ContainerStarted","Data":"2900090e06dfb25e81bf746c6b58d1185bad73d6b211e451645a7527d4d34698"} Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.525290 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrwq2" event={"ID":"866cf896-d679-426b-80d9-de7a368958ed","Type":"ContainerStarted","Data":"ff6f3fd3fde20a283a53c92ab72a185ccada0993ab1aaaf7be939cf0ef2251bb"} Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.526708 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bnspk" event={"ID":"9584ac77-41db-4621-a720-88b7c107ffa2","Type":"ContainerStarted","Data":"406302f13e6b896138801770e4ce0d455a2bfb699512d0893ebba7f558a1bb92"} Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.527863 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-ptn6j" event={"ID":"41b063f1-7646-49dc-85e4-9e7185220de1","Type":"ContainerStarted","Data":"a0f0a903e3dd671bc13d8f983306a3b9546803f76d185e300f335033a4b454f1"} Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.540261 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4frldw\" (UID: \"e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" Dec 04 15:20:47 crc kubenswrapper[4946]: E1204 15:20:47.540452 4946 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 15:20:47 crc kubenswrapper[4946]: E1204 15:20:47.540566 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-cert podName:e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6 nodeName:}" failed. No retries permitted until 2025-12-04 15:20:49.540536554 +0000 UTC m=+1100.426580205 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" (UID: "e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.624694 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-bqtnh"] Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.642953 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-tp7zf"] Dec 04 15:20:47 crc kubenswrapper[4946]: W1204 15:20:47.650743 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4439c79c_3951_4b61_98ad_86f417432fde.slice/crio-b65a957b84e1cd91e86371c3394d930afc4122262d12e5230817f9517988a4e8 WatchSource:0}: Error finding container b65a957b84e1cd91e86371c3394d930afc4122262d12e5230817f9517988a4e8: Status 404 returned error can't find the container with id b65a957b84e1cd91e86371c3394d930afc4122262d12e5230817f9517988a4e8 Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.662301 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ffkxj"] Dec 04 15:20:47 crc kubenswrapper[4946]: E1204 15:20:47.664356 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-r7h64,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
rabbitmq-cluster-operator-manager-668c99d594-ffkxj_openstack-operators(57d9b742-9429-43c6-8798-6813c321866f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 04 15:20:47 crc kubenswrapper[4946]: E1204 15:20:47.665492 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ffkxj" podUID="57d9b742-9429-43c6-8798-6813c321866f" Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.677990 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-25vwl"] Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.691983 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5d9cf8555c-csjn7"] Dec 04 15:20:47 crc kubenswrapper[4946]: E1204 15:20:47.693694 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jf4hr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-25vwl_openstack-operators(b7a5eb4e-a8b8-43e5-95cf-51f40d454d79): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 04 15:20:47 crc kubenswrapper[4946]: W1204 15:20:47.696491 4946 manager.go:1169] Failed to process 
watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc9933077_41f3_425f_b478_c53691b7d817.slice/crio-184a255ee6a35a6d9e20a4d938e045802bc928db9c65dab6b5061c986f722cc2 WatchSource:0}: Error finding container 184a255ee6a35a6d9e20a4d938e045802bc928db9c65dab6b5061c986f722cc2: Status 404 returned error can't find the container with id 184a255ee6a35a6d9e20a4d938e045802bc928db9c65dab6b5061c986f722cc2 Dec 04 15:20:47 crc kubenswrapper[4946]: E1204 15:20:47.700627 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jf4hr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-25vwl_openstack-operators(b7a5eb4e-a8b8-43e5-95cf-51f40d454d79): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.701815 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-f6jlm"] Dec 04 15:20:47 crc kubenswrapper[4946]: E1204 15:20:47.701885 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-25vwl" podUID="b7a5eb4e-a8b8-43e5-95cf-51f40d454d79" Dec 04 15:20:47 crc kubenswrapper[4946]: W1204 15:20:47.703913 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa8a1267_46f8_4554_8a91_7389be265abd.slice/crio-ff5851088921a12e236c95ca6c4e0e9c0deb44d7274ce275a7015833afd89675 WatchSource:0}: Error finding container ff5851088921a12e236c95ca6c4e0e9c0deb44d7274ce275a7015833afd89675: Status 404 returned error can't find the container with id ff5851088921a12e236c95ca6c4e0e9c0deb44d7274ce275a7015833afd89675 Dec 04 15:20:47 crc kubenswrapper[4946]: E1204 15:20:47.704886 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:38.102.83.213:5001/openstack-k8s-operators/telemetry-operator:d41273755bc130d021645570cb35db3b5f04d199,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qp8g5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-5d9cf8555c-csjn7_openstack-operators(c9933077-41f3-425f-b478-c53691b7d817): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 04 15:20:47 crc kubenswrapper[4946]: E1204 15:20:47.707370 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-947w4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-f6jlm_openstack-operators(fa8a1267-46f8-4554-8a91-7389be265abd): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 04 15:20:47 crc kubenswrapper[4946]: E1204 15:20:47.707488 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qp8g5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-5d9cf8555c-csjn7_openstack-operators(c9933077-41f3-425f-b478-c53691b7d817): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 04 15:20:47 crc kubenswrapper[4946]: E1204 15:20:47.709264 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull 
QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-5d9cf8555c-csjn7" podUID="c9933077-41f3-425f-b478-c53691b7d817" Dec 04 15:20:47 crc kubenswrapper[4946]: E1204 15:20:47.710061 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-947w4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-f6jlm_openstack-operators(fa8a1267-46f8-4554-8a91-7389be265abd): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 04 15:20:47 crc kubenswrapper[4946]: E1204 15:20:47.711248 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-f6jlm" podUID="fa8a1267-46f8-4554-8a91-7389be265abd" Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.849356 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-metrics-certs\") pod \"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.849471 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-webhook-certs\") pod \"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:20:47 crc kubenswrapper[4946]: E1204 15:20:47.849601 4946 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 04 15:20:47 crc kubenswrapper[4946]: E1204 15:20:47.849618 4946 secret.go:188] 
Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 04 15:20:47 crc kubenswrapper[4946]: E1204 15:20:47.849658 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-webhook-certs podName:6a246ded-a3c1-42c5-a6a7-648dec93f77f nodeName:}" failed. No retries permitted until 2025-12-04 15:20:49.849643592 +0000 UTC m=+1100.735687223 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-webhook-certs") pod "openstack-operator-controller-manager-7b58c9d549-7lmqq" (UID: "6a246ded-a3c1-42c5-a6a7-648dec93f77f") : secret "webhook-server-cert" not found Dec 04 15:20:47 crc kubenswrapper[4946]: E1204 15:20:47.849691 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-metrics-certs podName:6a246ded-a3c1-42c5-a6a7-648dec93f77f nodeName:}" failed. No retries permitted until 2025-12-04 15:20:49.849672962 +0000 UTC m=+1100.735716603 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-metrics-certs") pod "openstack-operator-controller-manager-7b58c9d549-7lmqq" (UID: "6a246ded-a3c1-42c5-a6a7-648dec93f77f") : secret "metrics-server-cert" not found Dec 04 15:20:47 crc kubenswrapper[4946]: I1204 15:20:47.912217 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-rdrpr"] Dec 04 15:20:47 crc kubenswrapper[4946]: W1204 15:20:47.913940 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod110a7ea7_4b02_4f5d_be16_87c4f0090eec.slice/crio-0cba9e4142af26cb65fc2678bd77702b292e13820ee40fccda3c6276ba8f4f7c WatchSource:0}: Error finding container 0cba9e4142af26cb65fc2678bd77702b292e13820ee40fccda3c6276ba8f4f7c: Status 404 returned error can't find the container with id 0cba9e4142af26cb65fc2678bd77702b292e13820ee40fccda3c6276ba8f4f7c Dec 04 15:20:48 crc kubenswrapper[4946]: I1204 15:20:48.544040 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-rdrpr" event={"ID":"110a7ea7-4b02-4f5d-be16-87c4f0090eec","Type":"ContainerStarted","Data":"0cba9e4142af26cb65fc2678bd77702b292e13820ee40fccda3c6276ba8f4f7c"} Dec 04 15:20:48 crc kubenswrapper[4946]: I1204 15:20:48.546800 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-tp7zf" event={"ID":"4439c79c-3951-4b61-98ad-86f417432fde","Type":"ContainerStarted","Data":"b65a957b84e1cd91e86371c3394d930afc4122262d12e5230817f9517988a4e8"} Dec 04 15:20:48 crc kubenswrapper[4946]: I1204 15:20:48.552024 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ffkxj" event={"ID":"57d9b742-9429-43c6-8798-6813c321866f","Type":"ContainerStarted","Data":"e03c94cff01f61975f78eb0292a98d47a174992cc8474c97c4f1a3c5325cd3fe"} Dec 04 15:20:48 crc kubenswrapper[4946]: I1204 15:20:48.553850 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-f6jlm" 
event={"ID":"fa8a1267-46f8-4554-8a91-7389be265abd","Type":"ContainerStarted","Data":"ff5851088921a12e236c95ca6c4e0e9c0deb44d7274ce275a7015833afd89675"} Dec 04 15:20:48 crc kubenswrapper[4946]: E1204 15:20:48.556257 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ffkxj" podUID="57d9b742-9429-43c6-8798-6813c321866f" Dec 04 15:20:48 crc kubenswrapper[4946]: I1204 15:20:48.556288 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5d9cf8555c-csjn7" event={"ID":"c9933077-41f3-425f-b478-c53691b7d817","Type":"ContainerStarted","Data":"184a255ee6a35a6d9e20a4d938e045802bc928db9c65dab6b5061c986f722cc2"} Dec 04 15:20:48 crc kubenswrapper[4946]: E1204 15:20:48.556969 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-f6jlm" podUID="fa8a1267-46f8-4554-8a91-7389be265abd" Dec 04 15:20:48 crc kubenswrapper[4946]: I1204 15:20:48.558233 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-25vwl" event={"ID":"b7a5eb4e-a8b8-43e5-95cf-51f40d454d79","Type":"ContainerStarted","Data":"2d4ebe2f00a6d8d8046cb2e5551a231af2df6f7a1d4140f447241ccc4d184863"} Dec 04 15:20:48 crc kubenswrapper[4946]: E1204 15:20:48.561545 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.213:5001/openstack-k8s-operators/telemetry-operator:d41273755bc130d021645570cb35db3b5f04d199\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-5d9cf8555c-csjn7" podUID="c9933077-41f3-425f-b478-c53691b7d817" Dec 04 15:20:48 crc kubenswrapper[4946]: I1204 15:20:48.561890 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bqtnh" event={"ID":"c92477ee-92e6-4dca-af5d-9b0f44bcaf60","Type":"ContainerStarted","Data":"0fd913613ed104c7154a992d0cbc6449d55341388be22b6412d1ff492ef82065"} Dec 04 15:20:48 crc kubenswrapper[4946]: E1204 15:20:48.566798 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-25vwl" 
podUID="b7a5eb4e-a8b8-43e5-95cf-51f40d454d79" Dec 04 15:20:48 crc kubenswrapper[4946]: I1204 15:20:48.868687 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-cert\") pod \"infra-operator-controller-manager-57548d458d-tpk4r\" (UID: \"421ad636-5eeb-4596-84c0-a0ca3cfbdef2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" Dec 04 15:20:48 crc kubenswrapper[4946]: E1204 15:20:48.869044 4946 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 04 15:20:48 crc kubenswrapper[4946]: E1204 15:20:48.869205 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-cert podName:421ad636-5eeb-4596-84c0-a0ca3cfbdef2 nodeName:}" failed. No retries permitted until 2025-12-04 15:20:52.869161885 +0000 UTC m=+1103.755205526 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-cert") pod "infra-operator-controller-manager-57548d458d-tpk4r" (UID: "421ad636-5eeb-4596-84c0-a0ca3cfbdef2") : secret "infra-operator-webhook-server-cert" not found Dec 04 15:20:49 crc kubenswrapper[4946]: E1204 15:20:49.576965 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ffkxj" podUID="57d9b742-9429-43c6-8798-6813c321866f" Dec 04 15:20:49 crc kubenswrapper[4946]: E1204 15:20:49.577545 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.213:5001/openstack-k8s-operators/telemetry-operator:d41273755bc130d021645570cb35db3b5f04d199\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-5d9cf8555c-csjn7" podUID="c9933077-41f3-425f-b478-c53691b7d817" Dec 04 15:20:49 crc kubenswrapper[4946]: I1204 15:20:49.587473 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4frldw\" (UID: \"e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" Dec 04 15:20:49 crc kubenswrapper[4946]: E1204 15:20:49.587653 4946 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 15:20:49 crc kubenswrapper[4946]: E1204 15:20:49.587726 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-cert podName:e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6 nodeName:}" failed. No retries permitted until 2025-12-04 15:20:53.587702806 +0000 UTC m=+1104.473746447 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" (UID: "e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 15:20:49 crc kubenswrapper[4946]: E1204 15:20:49.587977 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-f6jlm" podUID="fa8a1267-46f8-4554-8a91-7389be265abd" Dec 04 15:20:49 crc kubenswrapper[4946]: E1204 15:20:49.595602 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-25vwl" podUID="b7a5eb4e-a8b8-43e5-95cf-51f40d454d79" Dec 04 15:20:49 crc kubenswrapper[4946]: I1204 15:20:49.916934 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-webhook-certs\") pod \"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:20:49 crc kubenswrapper[4946]: E1204 15:20:49.917337 4946 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 04 15:20:49 crc kubenswrapper[4946]: E1204 15:20:49.917717 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-webhook-certs podName:6a246ded-a3c1-42c5-a6a7-648dec93f77f nodeName:}" failed. No retries permitted until 2025-12-04 15:20:53.917679568 +0000 UTC m=+1104.803723209 (durationBeforeRetry 4s). 
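The "Error syncing pod, skipping ... ImagePullBackOff" entries above embed the failing image reference inside doubly escaped quotes. A small Python sketch that collects the distinct images stuck in pull back-off; the regex and the kubelet.log file name are assumptions based only on the lines shown here:

    import re

    # The image reference sits inside doubly escaped quotes in the err= field:
    #   ... ImagePullBackOff: \"Back-off pulling image \\\"quay.io/...\\\"\"
    PULL_BACKOFF = re.compile(r'Back-off pulling image \\+"(?P<image>[^"\\]+)')

    def backoff_images(log_lines):
        # Collect the distinct images the kubelet reports as stuck in back-off.
        images = set()
        for line in log_lines:
            for m in PULL_BACKOFF.finditer(line):
                images.add(m.group('image'))
        return sorted(images)

    # e.g. backoff_images(open('kubelet.log', encoding='utf-8', errors='replace'))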
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-webhook-certs") pod "openstack-operator-controller-manager-7b58c9d549-7lmqq" (UID: "6a246ded-a3c1-42c5-a6a7-648dec93f77f") : secret "webhook-server-cert" not found Dec 04 15:20:49 crc kubenswrapper[4946]: I1204 15:20:49.918055 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-metrics-certs\") pod \"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:20:49 crc kubenswrapper[4946]: E1204 15:20:49.918321 4946 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 04 15:20:49 crc kubenswrapper[4946]: E1204 15:20:49.918537 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-metrics-certs podName:6a246ded-a3c1-42c5-a6a7-648dec93f77f nodeName:}" failed. No retries permitted until 2025-12-04 15:20:53.91852369 +0000 UTC m=+1104.804567331 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-metrics-certs") pod "openstack-operator-controller-manager-7b58c9d549-7lmqq" (UID: "6a246ded-a3c1-42c5-a6a7-648dec93f77f") : secret "metrics-server-cert" not found Dec 04 15:20:52 crc kubenswrapper[4946]: I1204 15:20:52.891303 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-cert\") pod \"infra-operator-controller-manager-57548d458d-tpk4r\" (UID: \"421ad636-5eeb-4596-84c0-a0ca3cfbdef2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" Dec 04 15:20:52 crc kubenswrapper[4946]: E1204 15:20:52.891573 4946 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 04 15:20:52 crc kubenswrapper[4946]: E1204 15:20:52.891749 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-cert podName:421ad636-5eeb-4596-84c0-a0ca3cfbdef2 nodeName:}" failed. No retries permitted until 2025-12-04 15:21:00.891701441 +0000 UTC m=+1111.777745222 (durationBeforeRetry 8s). 
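The m=+<seconds> suffix on the retry deadlines above is Go's monotonic-clock reading, appended by time.Time's string formatting; its origin is arbitrary, but the difference between two m= values is exact elapsed time, immune to wall-clock adjustments. For example, the two webhook-certs retry deadlines (m=+1104.803723209 and m=+1112.896869417) are about 8.093 s apart, agreeing with their wall-clock timestamps. A short Python check (names are illustrative):

    import re

    MONO = re.compile(r'm=\+(?P<secs>\d+\.\d+)')

    def mono_seconds(entry):
        # Go appends 'm=+<seconds>' when a time.Time carries a monotonic
        # reading; only differences between two readings are meaningful.
        m = MONO.search(entry)
        return float(m.group('secs')) if m else None

    first  = 'retry at 2025-12-04 15:20:53.917679568 +0000 UTC m=+1104.803723209'
    second = 'retry at 2025-12-04 15:21:02.010825776 +0000 UTC m=+1112.896869417'
    print(mono_seconds(second) - mono_seconds(first))  # ~8.093 s between deadlines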
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-cert") pod "infra-operator-controller-manager-57548d458d-tpk4r" (UID: "421ad636-5eeb-4596-84c0-a0ca3cfbdef2") : secret "infra-operator-webhook-server-cert" not found Dec 04 15:20:53 crc kubenswrapper[4946]: I1204 15:20:53.603753 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4frldw\" (UID: \"e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" Dec 04 15:20:53 crc kubenswrapper[4946]: E1204 15:20:53.603925 4946 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 15:20:53 crc kubenswrapper[4946]: E1204 15:20:53.604013 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-cert podName:e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6 nodeName:}" failed. No retries permitted until 2025-12-04 15:21:01.603993297 +0000 UTC m=+1112.490036938 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" (UID: "e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 15:20:54 crc kubenswrapper[4946]: I1204 15:20:54.010498 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-webhook-certs\") pod \"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:20:54 crc kubenswrapper[4946]: I1204 15:20:54.010707 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-metrics-certs\") pod \"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:20:54 crc kubenswrapper[4946]: E1204 15:20:54.010755 4946 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 04 15:20:54 crc kubenswrapper[4946]: E1204 15:20:54.010848 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-webhook-certs podName:6a246ded-a3c1-42c5-a6a7-648dec93f77f nodeName:}" failed. No retries permitted until 2025-12-04 15:21:02.010825776 +0000 UTC m=+1112.896869417 (durationBeforeRetry 8s). 
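Across the retries above the durationBeforeRetry doubles: the first failed mount attempts schedule a retry in 4s (15:20:48-15:20:49), the next round in 8s (15:20:52-15:20:54), the signature of an exponential back-off. A sketch of such a schedule; the initial delay, factor, and cap below are illustrative assumptions, not kubelet's actual constants:

    def backoff_schedule(initial=0.5, factor=2.0, cap=128.0, attempts=6):
        # Illustrative doubling back-off; initial/factor/cap are assumptions
        # for demonstration only. The 4s and 8s durationBeforeRetry values
        # above sit on such a doubling curve.
        delay = initial
        for _ in range(attempts):
            yield delay
            delay = min(delay * factor, cap)

    print(list(backoff_schedule()))  # [0.5, 1.0, 2.0, 4.0, 8.0, 16.0]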
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-webhook-certs") pod "openstack-operator-controller-manager-7b58c9d549-7lmqq" (UID: "6a246ded-a3c1-42c5-a6a7-648dec93f77f") : secret "webhook-server-cert" not found Dec 04 15:20:54 crc kubenswrapper[4946]: E1204 15:20:54.010950 4946 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 04 15:20:54 crc kubenswrapper[4946]: E1204 15:20:54.011041 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-metrics-certs podName:6a246ded-a3c1-42c5-a6a7-648dec93f77f nodeName:}" failed. No retries permitted until 2025-12-04 15:21:02.011020871 +0000 UTC m=+1112.897064512 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-metrics-certs") pod "openstack-operator-controller-manager-7b58c9d549-7lmqq" (UID: "6a246ded-a3c1-42c5-a6a7-648dec93f77f") : secret "metrics-server-cert" not found Dec 04 15:21:00 crc kubenswrapper[4946]: I1204 15:21:00.922506 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-cert\") pod \"infra-operator-controller-manager-57548d458d-tpk4r\" (UID: \"421ad636-5eeb-4596-84c0-a0ca3cfbdef2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" Dec 04 15:21:00 crc kubenswrapper[4946]: I1204 15:21:00.931008 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/421ad636-5eeb-4596-84c0-a0ca3cfbdef2-cert\") pod \"infra-operator-controller-manager-57548d458d-tpk4r\" (UID: \"421ad636-5eeb-4596-84c0-a0ca3cfbdef2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" Dec 04 15:21:01 crc kubenswrapper[4946]: I1204 15:21:01.022163 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" Dec 04 15:21:01 crc kubenswrapper[4946]: I1204 15:21:01.635059 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4frldw\" (UID: \"e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" Dec 04 15:21:01 crc kubenswrapper[4946]: I1204 15:21:01.638788 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4frldw\" (UID: \"e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" Dec 04 15:21:01 crc kubenswrapper[4946]: I1204 15:21:01.722102 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" Dec 04 15:21:02 crc kubenswrapper[4946]: I1204 15:21:02.041035 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-metrics-certs\") pod \"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:21:02 crc kubenswrapper[4946]: I1204 15:21:02.041203 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-webhook-certs\") pod \"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:21:02 crc kubenswrapper[4946]: I1204 15:21:02.045087 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-webhook-certs\") pod \"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:21:02 crc kubenswrapper[4946]: I1204 15:21:02.045135 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6a246ded-a3c1-42c5-a6a7-648dec93f77f-metrics-certs\") pod \"openstack-operator-controller-manager-7b58c9d549-7lmqq\" (UID: \"6a246ded-a3c1-42c5-a6a7-648dec93f77f\") " pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:21:02 crc kubenswrapper[4946]: I1204 15:21:02.113786 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:21:09 crc kubenswrapper[4946]: E1204 15:21:09.938340 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:9f68d7bc8c6bce38f46dee8a8272d5365c49fe7b32b2af52e8ac884e212f3a85" Dec 04 15:21:09 crc kubenswrapper[4946]: E1204 15:21:09.939028 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:9f68d7bc8c6bce38f46dee8a8272d5365c49fe7b32b2af52e8ac884e212f3a85,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ntxgw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-78b4bc895b-zdqwh_openstack-operators(d4f2bb8c-1eac-4b12-bd9a-9c8ebad7d96f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 15:21:10 crc kubenswrapper[4946]: E1204 15:21:10.648311 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:abdb733b01e92ac17f565762f30f1d075b44c16421bd06e557f6bb3c319e1809" Dec 04 15:21:10 crc kubenswrapper[4946]: E1204 15:21:10.648559 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:abdb733b01e92ac17f565762f30f1d075b44c16421bd06e557f6bb3c319e1809,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-r95x8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-77987cd8cd-fsn5z_openstack-operators(a69ef7eb-6ffc-47cb-b7ee-7c46734d0857): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 15:21:11 crc kubenswrapper[4946]: E1204 15:21:11.190583 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f" Dec 04 15:21:11 crc kubenswrapper[4946]: E1204 15:21:11.190765 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: 
{{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jntt9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-rdrpr_openstack-operators(110a7ea7-4b02-4f5d-be16-87c4f0090eec): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 15:21:12 crc kubenswrapper[4946]: E1204 15:21:12.009917 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429" Dec 04 15:21:12 crc kubenswrapper[4946]: E1204 15:21:12.010232 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-65qnq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-bd6fv_openstack-operators(ae1dfef3-ccf2-4ac3-986e-77c23bddcdb5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 15:21:12 crc kubenswrapper[4946]: E1204 15:21:12.764691 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 04 15:21:12 crc kubenswrapper[4946]: E1204 15:21:12.765284 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6pmc2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-vzpjw_openstack-operators(10b2d29b-4444-4dfe-ad8f-ad913798df88): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 15:21:15 crc kubenswrapper[4946]: E1204 15:21:15.721078 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 04 15:21:15 crc kubenswrapper[4946]: E1204 15:21:15.721835 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-c9g4l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-8nbch_openstack-operators(965366ad-4bb5-424a-9cf0-d09c42dec244): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 15:21:17 crc kubenswrapper[4946]: I1204 15:21:17.319983 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw"] Dec 04 15:21:17 crc kubenswrapper[4946]: I1204 15:21:17.628208 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq"] Dec 04 15:21:17 crc kubenswrapper[4946]: I1204 15:21:17.684613 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r"] Dec 04 15:21:17 crc kubenswrapper[4946]: I1204 15:21:17.798178 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-hdr95" event={"ID":"6d911452-36e0-4227-9068-4ed0b86f025c","Type":"ContainerStarted","Data":"be0992856b315952244fc356a76630412f67ee6cdcd4f0b70a525e2553b80754"} Dec 04 15:21:17 crc kubenswrapper[4946]: I1204 15:21:17.802059 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" event={"ID":"e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6","Type":"ContainerStarted","Data":"fa2a0544fe6e8e3099b226bc531eb39f45db862b42fcf931b8d65548aaa2fdd7"} Dec 04 15:21:17 crc kubenswrapper[4946]: I1204 15:21:17.804846 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bqtnh" event={"ID":"c92477ee-92e6-4dca-af5d-9b0f44bcaf60","Type":"ContainerStarted","Data":"9d24c573f9a1dc739a9c93e26223977edbb62f7f51a35ad4c236f6781444a2c4"} Dec 04 15:21:17 crc kubenswrapper[4946]: I1204 15:21:17.807111 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bnspk" event={"ID":"9584ac77-41db-4621-a720-88b7c107ffa2","Type":"ContainerStarted","Data":"7fa7f1f48db8f199db4eab8f1c79b86a659d06d8b41c4850715945996367a981"} Dec 04 15:21:17 crc kubenswrapper[4946]: I1204 15:21:17.808751 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-77ss8" event={"ID":"262aaccf-cdc8-44b6-8fc6-8702491cfad8","Type":"ContainerStarted","Data":"142ef61304a8d4b485858f6e7ecede74e392bf07f1d2275fc307621026eec4dc"} Dec 04 15:21:17 crc kubenswrapper[4946]: W1204 15:21:17.812487 4946 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6a246ded_a3c1_42c5_a6a7_648dec93f77f.slice/crio-1cbb60e91bf4aecf69e45cd71768e16f8ea3638fbae6175d60e09bf8a62b1351 WatchSource:0}: Error finding container 1cbb60e91bf4aecf69e45cd71768e16f8ea3638fbae6175d60e09bf8a62b1351: Status 404 returned error can't find the container with id 1cbb60e91bf4aecf69e45cd71768e16f8ea3638fbae6175d60e09bf8a62b1351 Dec 04 15:21:17 crc kubenswrapper[4946]: W1204 15:21:17.817425 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod421ad636_5eeb_4596_84c0_a0ca3cfbdef2.slice/crio-96fb483e7d3d976a78b6d463a8cd91e94bb8a48525d2bbf86f6bd4cffb3b746e WatchSource:0}: Error finding container 96fb483e7d3d976a78b6d463a8cd91e94bb8a48525d2bbf86f6bd4cffb3b746e: Status 404 returned error can't find the container with id 96fb483e7d3d976a78b6d463a8cd91e94bb8a48525d2bbf86f6bd4cffb3b746e Dec 04 15:21:18 crc kubenswrapper[4946]: I1204 15:21:18.820885 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" event={"ID":"421ad636-5eeb-4596-84c0-a0ca3cfbdef2","Type":"ContainerStarted","Data":"96fb483e7d3d976a78b6d463a8cd91e94bb8a48525d2bbf86f6bd4cffb3b746e"} Dec 04 15:21:18 crc kubenswrapper[4946]: I1204 15:21:18.832096 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-ptn6j" event={"ID":"41b063f1-7646-49dc-85e4-9e7185220de1","Type":"ContainerStarted","Data":"0a31fdc2bae07ccae82241e1da9dc1313ac142fc2abf9e1cecca536ed70ca3bc"} Dec 04 15:21:18 crc kubenswrapper[4946]: I1204 15:21:18.833426 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrwq2" event={"ID":"866cf896-d679-426b-80d9-de7a368958ed","Type":"ContainerStarted","Data":"a2174da4e826923589809382f634abe8c8dc974d9dbbc9fbc9ebe08529187fe2"} Dec 04 15:21:18 crc kubenswrapper[4946]: I1204 15:21:18.837328 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2wkbw" event={"ID":"db2d87e7-4cf3-4d0d-b77e-2d02a073872c","Type":"ContainerStarted","Data":"0b5d58fbbf53e07905add1d0f9d17427145437335bca758df08e07a8dae5b434"} Dec 04 15:21:18 crc kubenswrapper[4946]: I1204 15:21:18.847086 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" event={"ID":"6a246ded-a3c1-42c5-a6a7-648dec93f77f","Type":"ContainerStarted","Data":"1cbb60e91bf4aecf69e45cd71768e16f8ea3638fbae6175d60e09bf8a62b1351"} Dec 04 15:21:19 crc kubenswrapper[4946]: I1204 15:21:19.877965 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-29dnk" event={"ID":"76e27cbb-fdb9-447e-983f-48b7dbe8d46d","Type":"ContainerStarted","Data":"f5ffc150534ca270eba477fa53a9caed720ee96a5e8f5b34048fb22fa2889e55"} Dec 04 15:21:19 crc kubenswrapper[4946]: I1204 15:21:19.883896 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-25vwl" event={"ID":"b7a5eb4e-a8b8-43e5-95cf-51f40d454d79","Type":"ContainerStarted","Data":"aaccb3b6a2adc512d568410f98962b01c75da4ce0d43cd9e9676026c1131a1f2"} Dec 04 15:21:19 crc kubenswrapper[4946]: I1204 15:21:19.892248 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-tp7zf" event={"ID":"4439c79c-3951-4b61-98ad-86f417432fde","Type":"ContainerStarted","Data":"b44e7256a26233fd51dea70cfe482461e3c8d9afe4ab89f4e41585abdf640cdd"} Dec 04 15:21:19 crc kubenswrapper[4946]: I1204 15:21:19.894340 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-j8r75" event={"ID":"52d7003e-8315-49b6-b086-f0655f555960","Type":"ContainerStarted","Data":"6c516a67eaf66212be9bd6a2c86f4e29e7106b0586d247b3aace1ca834e5d883"} Dec 04 15:21:20 crc kubenswrapper[4946]: I1204 15:21:20.932824 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" event={"ID":"6a246ded-a3c1-42c5-a6a7-648dec93f77f","Type":"ContainerStarted","Data":"7cca2584157a75df3ee443c959d7345c401548e88dc0d497d2daf2e6a85f2869"} Dec 04 15:21:20 crc kubenswrapper[4946]: I1204 15:21:20.933224 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:21:20 crc kubenswrapper[4946]: I1204 15:21:20.942783 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ffkxj" event={"ID":"57d9b742-9429-43c6-8798-6813c321866f","Type":"ContainerStarted","Data":"42101bcc5d88cd80bd0bf2cbcfde9db6c93fcfc94d2e1fb8a1d42b32ff557406"} Dec 04 15:21:20 crc kubenswrapper[4946]: I1204 15:21:20.950114 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-f6jlm" event={"ID":"fa8a1267-46f8-4554-8a91-7389be265abd","Type":"ContainerStarted","Data":"ba981dcb8da50d9f5d834eddec6c389ee2a9f80f7c63145a09ffca16ac2df7f7"} Dec 04 15:21:20 crc kubenswrapper[4946]: I1204 15:21:20.995504 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ffkxj" podStartSLOduration=6.094585546 podStartE2EDuration="35.995485376s" podCreationTimestamp="2025-12-04 15:20:45 +0000 UTC" firstStartedPulling="2025-12-04 15:20:47.66413246 +0000 UTC m=+1098.550176101" lastFinishedPulling="2025-12-04 15:21:17.56503229 +0000 UTC m=+1128.451075931" observedRunningTime="2025-12-04 15:21:20.995143247 +0000 UTC m=+1131.881186888" watchObservedRunningTime="2025-12-04 15:21:20.995485376 +0000 UTC m=+1131.881529017" Dec 04 15:21:21 crc kubenswrapper[4946]: I1204 15:21:21.000108 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" podStartSLOduration=36.00009137 podStartE2EDuration="36.00009137s" podCreationTimestamp="2025-12-04 15:20:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:21:20.965607651 +0000 UTC m=+1131.851651292" watchObservedRunningTime="2025-12-04 15:21:21.00009137 +0000 UTC m=+1131.886135011" Dec 04 15:21:23 crc kubenswrapper[4946]: E1204 15:21:23.943655 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-zdqwh" podUID="d4f2bb8c-1eac-4b12-bd9a-9c8ebad7d96f" Dec 04 15:21:23 crc kubenswrapper[4946]: I1204 15:21:23.985927 4946 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-zdqwh" event={"ID":"d4f2bb8c-1eac-4b12-bd9a-9c8ebad7d96f","Type":"ContainerStarted","Data":"2828d105ce29b5f7675d7eb05b59216322fa7b22d3d4c85875a795310b3bc818"} Dec 04 15:21:23 crc kubenswrapper[4946]: I1204 15:21:23.991162 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" event={"ID":"421ad636-5eeb-4596-84c0-a0ca3cfbdef2","Type":"ContainerStarted","Data":"719fb8a116ccf3d91cab94188d26bfe2e3916fd250458491fcb6ad34d8c710c3"} Dec 04 15:21:24 crc kubenswrapper[4946]: I1204 15:21:23.999761 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5d9cf8555c-csjn7" event={"ID":"c9933077-41f3-425f-b478-c53691b7d817","Type":"ContainerStarted","Data":"0c4423ece88dd5c698ff98c40dabfa61ca1415784650d27ec41b65bdf4227f6b"} Dec 04 15:21:24 crc kubenswrapper[4946]: I1204 15:21:23.999797 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5d9cf8555c-csjn7" event={"ID":"c9933077-41f3-425f-b478-c53691b7d817","Type":"ContainerStarted","Data":"f6ad0ac0c1659efcf2f4e8e4be3156a62cd5bc94332a3b6eb51fd64063ad8852"} Dec 04 15:21:24 crc kubenswrapper[4946]: I1204 15:21:24.000747 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-5d9cf8555c-csjn7" Dec 04 15:21:24 crc kubenswrapper[4946]: I1204 15:21:24.038035 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-5d9cf8555c-csjn7" podStartSLOduration=9.185156470999999 podStartE2EDuration="39.038017211s" podCreationTimestamp="2025-12-04 15:20:45 +0000 UTC" firstStartedPulling="2025-12-04 15:20:47.704704769 +0000 UTC m=+1098.590748410" lastFinishedPulling="2025-12-04 15:21:17.557565509 +0000 UTC m=+1128.443609150" observedRunningTime="2025-12-04 15:21:24.034922478 +0000 UTC m=+1134.920966119" watchObservedRunningTime="2025-12-04 15:21:24.038017211 +0000 UTC m=+1134.924060842" Dec 04 15:21:24 crc kubenswrapper[4946]: E1204 15:21:24.082334 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-vzpjw" podUID="10b2d29b-4444-4dfe-ad8f-ad913798df88" Dec 04 15:21:24 crc kubenswrapper[4946]: E1204 15:21:24.209907 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fsn5z" podUID="a69ef7eb-6ffc-47cb-b7ee-7c46734d0857" Dec 04 15:21:24 crc kubenswrapper[4946]: E1204 15:21:24.450914 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8nbch" podUID="965366ad-4bb5-424a-9cf0-d09c42dec244" Dec 04 15:21:24 crc kubenswrapper[4946]: E1204 15:21:24.682088 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: 
\"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-rdrpr" podUID="110a7ea7-4b02-4f5d-be16-87c4f0090eec" Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.013529 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" event={"ID":"e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6","Type":"ContainerStarted","Data":"48c59b5696908e2a1e6f9e455db53b16127fb91d44ac045331322b4f924fb2cc"} Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.026219 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bqtnh" event={"ID":"c92477ee-92e6-4dca-af5d-9b0f44bcaf60","Type":"ContainerStarted","Data":"8c177f0bb68c369c2565ba858cebacd95d817369b35398fb6a298b907b012f01"} Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.026865 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bqtnh" Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.037506 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8nbch" event={"ID":"965366ad-4bb5-424a-9cf0-d09c42dec244","Type":"ContainerStarted","Data":"1adbbbb382e4e71fe7406cc3271271e73065b2e4603c1973a41a6950dc62285f"} Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.037930 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bqtnh" Dec 04 15:21:25 crc kubenswrapper[4946]: E1204 15:21:25.039340 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8nbch" podUID="965366ad-4bb5-424a-9cf0-d09c42dec244" Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.049285 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bnspk" event={"ID":"9584ac77-41db-4621-a720-88b7c107ffa2","Type":"ContainerStarted","Data":"8b3c7858d92c197befbb1bb58580d5556d7606330a13e6e1e0a97883b62f1497"} Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.050567 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bnspk" Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.052260 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-tp7zf" event={"ID":"4439c79c-3951-4b61-98ad-86f417432fde","Type":"ContainerStarted","Data":"34c66162ec7d6115fdfe8a0e9b6bdac87cfe530e37ff951ec095464960555c4b"} Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.053077 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-tp7zf" Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.055236 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-tp7zf" Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.055431 4946 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bnspk" Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.062417 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-77ss8" event={"ID":"262aaccf-cdc8-44b6-8fc6-8702491cfad8","Type":"ContainerStarted","Data":"1428cfec0d8a218275dcac5583bb128d73a448a04fc8b9c2a051e308625f3560"} Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.063098 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-77ss8" Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.075462 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-77ss8" Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.077315 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bqtnh" podStartSLOduration=4.26478327 podStartE2EDuration="40.077298326s" podCreationTimestamp="2025-12-04 15:20:45 +0000 UTC" firstStartedPulling="2025-12-04 15:20:47.626100259 +0000 UTC m=+1098.512143900" lastFinishedPulling="2025-12-04 15:21:23.438615315 +0000 UTC m=+1134.324658956" observedRunningTime="2025-12-04 15:21:25.059794295 +0000 UTC m=+1135.945837946" watchObservedRunningTime="2025-12-04 15:21:25.077298326 +0000 UTC m=+1135.963341977" Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.084646 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-vzpjw" event={"ID":"10b2d29b-4444-4dfe-ad8f-ad913798df88","Type":"ContainerStarted","Data":"ecb9d3c54b25b6d6bbda74c579b07658e51b2e407289e9cb8ff334ba61f85104"} Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.103828 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-bnspk" podStartSLOduration=3.8831116039999998 podStartE2EDuration="40.10380007s" podCreationTimestamp="2025-12-04 15:20:45 +0000 UTC" firstStartedPulling="2025-12-04 15:20:47.356089551 +0000 UTC m=+1098.242133192" lastFinishedPulling="2025-12-04 15:21:23.576778017 +0000 UTC m=+1134.462821658" observedRunningTime="2025-12-04 15:21:25.097244594 +0000 UTC m=+1135.983288235" watchObservedRunningTime="2025-12-04 15:21:25.10380007 +0000 UTC m=+1135.989843711" Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.106603 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-hdr95" event={"ID":"6d911452-36e0-4227-9068-4ed0b86f025c","Type":"ContainerStarted","Data":"2a6fd69f801c107e25ab3c7d59894aa66ec93afc8c262421ad1b47932ef858da"} Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.107682 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-hdr95" Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.109747 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-rdrpr" event={"ID":"110a7ea7-4b02-4f5d-be16-87c4f0090eec","Type":"ContainerStarted","Data":"6f60b5c11ae5c6ce25029c0714a9696babd365686b572bc6b0aa5adce0b1aae3"} Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 
15:21:25.124266 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-hdr95" Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.129343 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fsn5z" event={"ID":"a69ef7eb-6ffc-47cb-b7ee-7c46734d0857","Type":"ContainerStarted","Data":"7017088735d2f14216934b11f96bac18b497bcc2624fae4b8de0fee6f09e8c4c"} Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.156953 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrwq2" event={"ID":"866cf896-d679-426b-80d9-de7a368958ed","Type":"ContainerStarted","Data":"3124cf088a8b9ea11ffd95b34653da520611c6616381f42f4bfda318014b977b"} Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.157254 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrwq2" Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.168692 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrwq2" Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.214884 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-tp7zf" podStartSLOduration=4.285811776 podStartE2EDuration="40.214849751s" podCreationTimestamp="2025-12-04 15:20:45 +0000 UTC" firstStartedPulling="2025-12-04 15:20:47.655022678 +0000 UTC m=+1098.541066329" lastFinishedPulling="2025-12-04 15:21:23.584060663 +0000 UTC m=+1134.470104304" observedRunningTime="2025-12-04 15:21:25.190432404 +0000 UTC m=+1136.076476045" watchObservedRunningTime="2025-12-04 15:21:25.214849751 +0000 UTC m=+1136.100893382" Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.282813 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrwq2" podStartSLOduration=5.024678245 podStartE2EDuration="41.282516444s" podCreationTimestamp="2025-12-04 15:20:44 +0000 UTC" firstStartedPulling="2025-12-04 15:20:47.36133483 +0000 UTC m=+1098.247378471" lastFinishedPulling="2025-12-04 15:21:23.619173029 +0000 UTC m=+1134.505216670" observedRunningTime="2025-12-04 15:21:25.27900906 +0000 UTC m=+1136.165052701" watchObservedRunningTime="2025-12-04 15:21:25.282516444 +0000 UTC m=+1136.168560165" Dec 04 15:21:25 crc kubenswrapper[4946]: I1204 15:21:25.462183 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-hdr95" podStartSLOduration=4.311344122 podStartE2EDuration="41.462163024s" podCreationTimestamp="2025-12-04 15:20:44 +0000 UTC" firstStartedPulling="2025-12-04 15:20:46.256033828 +0000 UTC m=+1097.142077469" lastFinishedPulling="2025-12-04 15:21:23.40685273 +0000 UTC m=+1134.292896371" observedRunningTime="2025-12-04 15:21:25.435899066 +0000 UTC m=+1136.321942707" watchObservedRunningTime="2025-12-04 15:21:25.462163024 +0000 UTC m=+1136.348206665" Dec 04 15:21:25 crc kubenswrapper[4946]: E1204 15:21:25.560098 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-bd6fv" podUID="ae1dfef3-ccf2-4ac3-986e-77c23bddcdb5" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.168765 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" event={"ID":"421ad636-5eeb-4596-84c0-a0ca3cfbdef2","Type":"ContainerStarted","Data":"c1e55386e933a327574fd3196cf98cfea4fbf8244979d9db4a01e0dedd19f19d"} Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.169244 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.171152 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-25vwl" event={"ID":"b7a5eb4e-a8b8-43e5-95cf-51f40d454d79","Type":"ContainerStarted","Data":"ed28faadbf8d31236ae646886f5ba762113c2a7b3a8a10d94a13c19be064b9a7"} Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.171375 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-25vwl" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.172877 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-bd6fv" event={"ID":"ae1dfef3-ccf2-4ac3-986e-77c23bddcdb5","Type":"ContainerStarted","Data":"70d2d66239519d59222e08c502dd279f3e7d3ed861855fd87be74416e9a5a4ed"} Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.176448 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-25vwl" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.178335 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-f6jlm" event={"ID":"fa8a1267-46f8-4554-8a91-7389be265abd","Type":"ContainerStarted","Data":"ad29ac3bbd5c0bb03da71ab8d7f8f05583d3407f40b733b6d7566dd8afbf481b"} Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.178530 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-f6jlm" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.185848 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-f6jlm" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.187235 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-vzpjw" event={"ID":"10b2d29b-4444-4dfe-ad8f-ad913798df88","Type":"ContainerStarted","Data":"031fd2619ea9a2ef05da0d8c44b5eb676df98dd1764d8dd9e0b1848538e8e388"} Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.187369 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-vzpjw" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.190865 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-ptn6j" event={"ID":"41b063f1-7646-49dc-85e4-9e7185220de1","Type":"ContainerStarted","Data":"bdf9ce6ba701f6614c5580a28c4f85d386192e7e322c66ecdc53e11f4d3e3872"} Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.191075 4946 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-ptn6j" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.194263 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" event={"ID":"e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6","Type":"ContainerStarted","Data":"3dc6e2aa3fe6bcc52f02e3c4d44e8e308b17b42b1328a827a4f180b8569bbae9"} Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.194416 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.195717 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-ptn6j" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.196856 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-rdrpr" event={"ID":"110a7ea7-4b02-4f5d-be16-87c4f0090eec","Type":"ContainerStarted","Data":"3de23a44a1d314036c5680dbb05146b325f080a9477f3e76e38060f466dcb619"} Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.197540 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-rdrpr" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.199407 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fsn5z" event={"ID":"a69ef7eb-6ffc-47cb-b7ee-7c46734d0857","Type":"ContainerStarted","Data":"a86f7f320ad1b557d0e09b50da1f26f1654908c7a8d8de87b80e5461a12c6f8b"} Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.199874 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fsn5z" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.202753 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2wkbw" event={"ID":"db2d87e7-4cf3-4d0d-b77e-2d02a073872c","Type":"ContainerStarted","Data":"b158fa2e128cf80223171bb4b4c459ba26f1bdb72cef19eb4ca9a43f6785afe1"} Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.202947 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2wkbw" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.205529 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-j8r75" event={"ID":"52d7003e-8315-49b6-b086-f0655f555960","Type":"ContainerStarted","Data":"6b73451f687664a0db8009b33fc99b4b4ac29b7f15486ae59af831ad8c186fee"} Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.206149 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-j8r75" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.210304 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2wkbw" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.212362 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-29dnk" event={"ID":"76e27cbb-fdb9-447e-983f-48b7dbe8d46d","Type":"ContainerStarted","Data":"9492bf3372478dfe83ab28fe9cc6ac119a3546c668609da97dae84e91fc2d4d8"} Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.213456 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" podStartSLOduration=35.626177598 podStartE2EDuration="41.213434129s" podCreationTimestamp="2025-12-04 15:20:45 +0000 UTC" firstStartedPulling="2025-12-04 15:21:17.820962255 +0000 UTC m=+1128.707005896" lastFinishedPulling="2025-12-04 15:21:23.408218786 +0000 UTC m=+1134.294262427" observedRunningTime="2025-12-04 15:21:26.210451249 +0000 UTC m=+1137.096494910" watchObservedRunningTime="2025-12-04 15:21:26.213434129 +0000 UTC m=+1137.099477770" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.216042 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-77ss8" podStartSLOduration=5.041284222 podStartE2EDuration="41.216028709s" podCreationTimestamp="2025-12-04 15:20:45 +0000 UTC" firstStartedPulling="2025-12-04 15:20:47.410394105 +0000 UTC m=+1098.296437746" lastFinishedPulling="2025-12-04 15:21:23.585138592 +0000 UTC m=+1134.471182233" observedRunningTime="2025-12-04 15:21:25.561873959 +0000 UTC m=+1136.447917600" watchObservedRunningTime="2025-12-04 15:21:26.216028709 +0000 UTC m=+1137.102072350" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.218108 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-zdqwh" event={"ID":"d4f2bb8c-1eac-4b12-bd9a-9c8ebad7d96f","Type":"ContainerStarted","Data":"29574b44995922707ec7be879273a29bdfb92f4ec66dac2c0679df5eed3d24bf"} Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.219589 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-zdqwh" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.223759 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-j8r75" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.279731 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-j8r75" podStartSLOduration=5.02968074 podStartE2EDuration="41.279711725s" podCreationTimestamp="2025-12-04 15:20:45 +0000 UTC" firstStartedPulling="2025-12-04 15:20:47.325425106 +0000 UTC m=+1098.211468747" lastFinishedPulling="2025-12-04 15:21:23.575456091 +0000 UTC m=+1134.461499732" observedRunningTime="2025-12-04 15:21:26.276336544 +0000 UTC m=+1137.162380185" watchObservedRunningTime="2025-12-04 15:21:26.279711725 +0000 UTC m=+1137.165755376" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.281276 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fsn5z" podStartSLOduration=3.822261483 podStartE2EDuration="42.281263576s" podCreationTimestamp="2025-12-04 15:20:44 +0000 UTC" firstStartedPulling="2025-12-04 15:20:47.312898343 +0000 UTC m=+1098.198941984" lastFinishedPulling="2025-12-04 15:21:25.771900436 +0000 UTC m=+1136.657944077" observedRunningTime="2025-12-04 15:21:26.249440069 +0000 UTC 
m=+1137.135483710" watchObservedRunningTime="2025-12-04 15:21:26.281263576 +0000 UTC m=+1137.167307217" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.315281 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" podStartSLOduration=35.453481756 podStartE2EDuration="41.315256962s" podCreationTimestamp="2025-12-04 15:20:45 +0000 UTC" firstStartedPulling="2025-12-04 15:21:17.545068003 +0000 UTC m=+1128.431111644" lastFinishedPulling="2025-12-04 15:21:23.406843209 +0000 UTC m=+1134.292886850" observedRunningTime="2025-12-04 15:21:26.306979729 +0000 UTC m=+1137.193023380" watchObservedRunningTime="2025-12-04 15:21:26.315256962 +0000 UTC m=+1137.201300603" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.336302 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-rdrpr" podStartSLOduration=3.622216577 podStartE2EDuration="41.336283209s" podCreationTimestamp="2025-12-04 15:20:45 +0000 UTC" firstStartedPulling="2025-12-04 15:20:47.918281796 +0000 UTC m=+1098.804325447" lastFinishedPulling="2025-12-04 15:21:25.632348438 +0000 UTC m=+1136.518392079" observedRunningTime="2025-12-04 15:21:26.335172619 +0000 UTC m=+1137.221216260" watchObservedRunningTime="2025-12-04 15:21:26.336283209 +0000 UTC m=+1137.222326850" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.389597 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2wkbw" podStartSLOduration=5.116543554 podStartE2EDuration="41.389580604s" podCreationTimestamp="2025-12-04 15:20:45 +0000 UTC" firstStartedPulling="2025-12-04 15:20:47.312594415 +0000 UTC m=+1098.198638056" lastFinishedPulling="2025-12-04 15:21:23.585631465 +0000 UTC m=+1134.471675106" observedRunningTime="2025-12-04 15:21:26.363769809 +0000 UTC m=+1137.249813450" watchObservedRunningTime="2025-12-04 15:21:26.389580604 +0000 UTC m=+1137.275624245" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.389695 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-ptn6j" podStartSLOduration=5.2656096980000004 podStartE2EDuration="41.389692087s" podCreationTimestamp="2025-12-04 15:20:45 +0000 UTC" firstStartedPulling="2025-12-04 15:20:47.428347882 +0000 UTC m=+1098.314391523" lastFinishedPulling="2025-12-04 15:21:23.552430271 +0000 UTC m=+1134.438473912" observedRunningTime="2025-12-04 15:21:26.388917686 +0000 UTC m=+1137.274961327" watchObservedRunningTime="2025-12-04 15:21:26.389692087 +0000 UTC m=+1137.275735728" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.456314 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-f6jlm" podStartSLOduration=5.589134144 podStartE2EDuration="41.456295221s" podCreationTimestamp="2025-12-04 15:20:45 +0000 UTC" firstStartedPulling="2025-12-04 15:20:47.707255166 +0000 UTC m=+1098.593298807" lastFinishedPulling="2025-12-04 15:21:23.574416243 +0000 UTC m=+1134.460459884" observedRunningTime="2025-12-04 15:21:26.430330812 +0000 UTC m=+1137.316374473" watchObservedRunningTime="2025-12-04 15:21:26.456295221 +0000 UTC m=+1137.342338862" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.459535 4946 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-25vwl" podStartSLOduration=5.533807049 podStartE2EDuration="41.459522398s" podCreationTimestamp="2025-12-04 15:20:45 +0000 UTC" firstStartedPulling="2025-12-04 15:20:47.69347761 +0000 UTC m=+1098.579521251" lastFinishedPulling="2025-12-04 15:21:23.619192959 +0000 UTC m=+1134.505236600" observedRunningTime="2025-12-04 15:21:26.452491239 +0000 UTC m=+1137.338534880" watchObservedRunningTime="2025-12-04 15:21:26.459522398 +0000 UTC m=+1137.345566039" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.518369 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-vzpjw" podStartSLOduration=3.240187427 podStartE2EDuration="41.518350583s" podCreationTimestamp="2025-12-04 15:20:45 +0000 UTC" firstStartedPulling="2025-12-04 15:20:47.352347632 +0000 UTC m=+1098.238391273" lastFinishedPulling="2025-12-04 15:21:25.630510788 +0000 UTC m=+1136.516554429" observedRunningTime="2025-12-04 15:21:26.514999833 +0000 UTC m=+1137.401043474" watchObservedRunningTime="2025-12-04 15:21:26.518350583 +0000 UTC m=+1137.404394224" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.544391 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-29dnk" podStartSLOduration=5.201282042 podStartE2EDuration="42.544372364s" podCreationTimestamp="2025-12-04 15:20:44 +0000 UTC" firstStartedPulling="2025-12-04 15:20:46.233072938 +0000 UTC m=+1097.119116569" lastFinishedPulling="2025-12-04 15:21:23.57616325 +0000 UTC m=+1134.462206891" observedRunningTime="2025-12-04 15:21:26.541694842 +0000 UTC m=+1137.427738483" watchObservedRunningTime="2025-12-04 15:21:26.544372364 +0000 UTC m=+1137.430416005" Dec 04 15:21:26 crc kubenswrapper[4946]: I1204 15:21:26.586253 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-zdqwh" podStartSLOduration=4.712270537 podStartE2EDuration="42.586236511s" podCreationTimestamp="2025-12-04 15:20:44 +0000 UTC" firstStartedPulling="2025-12-04 15:20:46.802627488 +0000 UTC m=+1097.688671129" lastFinishedPulling="2025-12-04 15:21:24.676593462 +0000 UTC m=+1135.562637103" observedRunningTime="2025-12-04 15:21:26.581975137 +0000 UTC m=+1137.468018778" watchObservedRunningTime="2025-12-04 15:21:26.586236511 +0000 UTC m=+1137.472280142" Dec 04 15:21:27 crc kubenswrapper[4946]: I1204 15:21:27.228933 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8nbch" event={"ID":"965366ad-4bb5-424a-9cf0-d09c42dec244","Type":"ContainerStarted","Data":"31cae3d08da1b735ac7fe22a82dd5474943aa39ea01d66e459472eaa529fef4e"} Dec 04 15:21:27 crc kubenswrapper[4946]: I1204 15:21:27.231185 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-bd6fv" event={"ID":"ae1dfef3-ccf2-4ac3-986e-77c23bddcdb5","Type":"ContainerStarted","Data":"239ba3a37accd526a44dda07b2125e8ddd36c46cd2473bb67d01bff88ef174e7"} Dec 04 15:21:27 crc kubenswrapper[4946]: I1204 15:21:27.233529 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8nbch" Dec 04 15:21:27 crc kubenswrapper[4946]: I1204 15:21:27.233580 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-29dnk" Dec 04 15:21:27 crc kubenswrapper[4946]: I1204 15:21:27.234239 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-29dnk" Dec 04 15:21:27 crc kubenswrapper[4946]: I1204 15:21:27.267721 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8nbch" podStartSLOduration=2.773856862 podStartE2EDuration="42.267696498s" podCreationTimestamp="2025-12-04 15:20:45 +0000 UTC" firstStartedPulling="2025-12-04 15:20:47.391922824 +0000 UTC m=+1098.277966465" lastFinishedPulling="2025-12-04 15:21:26.88576246 +0000 UTC m=+1137.771806101" observedRunningTime="2025-12-04 15:21:27.25997328 +0000 UTC m=+1138.146016921" watchObservedRunningTime="2025-12-04 15:21:27.267696498 +0000 UTC m=+1138.153740139" Dec 04 15:21:27 crc kubenswrapper[4946]: I1204 15:21:27.300846 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-bd6fv" podStartSLOduration=3.643618082 podStartE2EDuration="43.30082049s" podCreationTimestamp="2025-12-04 15:20:44 +0000 UTC" firstStartedPulling="2025-12-04 15:20:47.254340896 +0000 UTC m=+1098.140384527" lastFinishedPulling="2025-12-04 15:21:26.911543294 +0000 UTC m=+1137.797586935" observedRunningTime="2025-12-04 15:21:27.287140472 +0000 UTC m=+1138.173184113" watchObservedRunningTime="2025-12-04 15:21:27.30082049 +0000 UTC m=+1138.186864131" Dec 04 15:21:28 crc kubenswrapper[4946]: I1204 15:21:28.248399 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-bd6fv" Dec 04 15:21:31 crc kubenswrapper[4946]: I1204 15:21:31.032456 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-tpk4r" Dec 04 15:21:31 crc kubenswrapper[4946]: I1204 15:21:31.729013 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4frldw" Dec 04 15:21:32 crc kubenswrapper[4946]: I1204 15:21:32.121052 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-7b58c9d549-7lmqq" Dec 04 15:21:35 crc kubenswrapper[4946]: I1204 15:21:35.286190 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-zdqwh" Dec 04 15:21:35 crc kubenswrapper[4946]: I1204 15:21:35.363463 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-bd6fv" Dec 04 15:21:35 crc kubenswrapper[4946]: I1204 15:21:35.407467 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fsn5z" Dec 04 15:21:35 crc kubenswrapper[4946]: I1204 15:21:35.605849 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-8nbch" Dec 04 15:21:36 crc kubenswrapper[4946]: I1204 15:21:36.063343 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-vzpjw" Dec 
04 15:21:36 crc kubenswrapper[4946]: I1204 15:21:36.208790 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-rdrpr" Dec 04 15:21:36 crc kubenswrapper[4946]: I1204 15:21:36.434335 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-5d9cf8555c-csjn7" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.208885 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-c5fz8"] Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.212539 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-c5fz8" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.218109 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.218501 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-9f5t8" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.218192 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.218279 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.234583 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-c5fz8"] Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.320410 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31b254b2-a6bc-4ee5-aa20-7dc8f577b114-config\") pod \"dnsmasq-dns-675f4bcbfc-c5fz8\" (UID: \"31b254b2-a6bc-4ee5-aa20-7dc8f577b114\") " pod="openstack/dnsmasq-dns-675f4bcbfc-c5fz8" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.320515 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mt4pm\" (UniqueName: \"kubernetes.io/projected/31b254b2-a6bc-4ee5-aa20-7dc8f577b114-kube-api-access-mt4pm\") pod \"dnsmasq-dns-675f4bcbfc-c5fz8\" (UID: \"31b254b2-a6bc-4ee5-aa20-7dc8f577b114\") " pod="openstack/dnsmasq-dns-675f4bcbfc-c5fz8" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.341176 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-h9fbt"] Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.342989 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-h9fbt" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.347036 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.369634 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-h9fbt"] Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.422085 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31b254b2-a6bc-4ee5-aa20-7dc8f577b114-config\") pod \"dnsmasq-dns-675f4bcbfc-c5fz8\" (UID: \"31b254b2-a6bc-4ee5-aa20-7dc8f577b114\") " pod="openstack/dnsmasq-dns-675f4bcbfc-c5fz8" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.422381 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c111666-b29d-4cfa-988b-f216dae8a8b0-config\") pod \"dnsmasq-dns-78dd6ddcc-h9fbt\" (UID: \"3c111666-b29d-4cfa-988b-f216dae8a8b0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-h9fbt" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.422561 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mt4pm\" (UniqueName: \"kubernetes.io/projected/31b254b2-a6bc-4ee5-aa20-7dc8f577b114-kube-api-access-mt4pm\") pod \"dnsmasq-dns-675f4bcbfc-c5fz8\" (UID: \"31b254b2-a6bc-4ee5-aa20-7dc8f577b114\") " pod="openstack/dnsmasq-dns-675f4bcbfc-c5fz8" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.423241 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31b254b2-a6bc-4ee5-aa20-7dc8f577b114-config\") pod \"dnsmasq-dns-675f4bcbfc-c5fz8\" (UID: \"31b254b2-a6bc-4ee5-aa20-7dc8f577b114\") " pod="openstack/dnsmasq-dns-675f4bcbfc-c5fz8" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.464296 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mt4pm\" (UniqueName: \"kubernetes.io/projected/31b254b2-a6bc-4ee5-aa20-7dc8f577b114-kube-api-access-mt4pm\") pod \"dnsmasq-dns-675f4bcbfc-c5fz8\" (UID: \"31b254b2-a6bc-4ee5-aa20-7dc8f577b114\") " pod="openstack/dnsmasq-dns-675f4bcbfc-c5fz8" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.478790 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.478887 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.524081 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c111666-b29d-4cfa-988b-f216dae8a8b0-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-h9fbt\" (UID: \"3c111666-b29d-4cfa-988b-f216dae8a8b0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-h9fbt" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.524190 4946 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c111666-b29d-4cfa-988b-f216dae8a8b0-config\") pod \"dnsmasq-dns-78dd6ddcc-h9fbt\" (UID: \"3c111666-b29d-4cfa-988b-f216dae8a8b0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-h9fbt" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.524445 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2xcf\" (UniqueName: \"kubernetes.io/projected/3c111666-b29d-4cfa-988b-f216dae8a8b0-kube-api-access-k2xcf\") pod \"dnsmasq-dns-78dd6ddcc-h9fbt\" (UID: \"3c111666-b29d-4cfa-988b-f216dae8a8b0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-h9fbt" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.525005 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c111666-b29d-4cfa-988b-f216dae8a8b0-config\") pod \"dnsmasq-dns-78dd6ddcc-h9fbt\" (UID: \"3c111666-b29d-4cfa-988b-f216dae8a8b0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-h9fbt" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.535266 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-c5fz8" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.625556 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c111666-b29d-4cfa-988b-f216dae8a8b0-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-h9fbt\" (UID: \"3c111666-b29d-4cfa-988b-f216dae8a8b0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-h9fbt" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.626061 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2xcf\" (UniqueName: \"kubernetes.io/projected/3c111666-b29d-4cfa-988b-f216dae8a8b0-kube-api-access-k2xcf\") pod \"dnsmasq-dns-78dd6ddcc-h9fbt\" (UID: \"3c111666-b29d-4cfa-988b-f216dae8a8b0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-h9fbt" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.626420 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c111666-b29d-4cfa-988b-f216dae8a8b0-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-h9fbt\" (UID: \"3c111666-b29d-4cfa-988b-f216dae8a8b0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-h9fbt" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.644253 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2xcf\" (UniqueName: \"kubernetes.io/projected/3c111666-b29d-4cfa-988b-f216dae8a8b0-kube-api-access-k2xcf\") pod \"dnsmasq-dns-78dd6ddcc-h9fbt\" (UID: \"3c111666-b29d-4cfa-988b-f216dae8a8b0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-h9fbt" Dec 04 15:21:52 crc kubenswrapper[4946]: I1204 15:21:52.666480 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-h9fbt" Dec 04 15:21:53 crc kubenswrapper[4946]: I1204 15:21:53.092288 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-c5fz8"] Dec 04 15:21:53 crc kubenswrapper[4946]: W1204 15:21:53.225417 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c111666_b29d_4cfa_988b_f216dae8a8b0.slice/crio-5833bcfb9098f1bdb8a30eb44681edf17e4496bb512acfe916aee93be5b472a0 WatchSource:0}: Error finding container 5833bcfb9098f1bdb8a30eb44681edf17e4496bb512acfe916aee93be5b472a0: Status 404 returned error can't find the container with id 5833bcfb9098f1bdb8a30eb44681edf17e4496bb512acfe916aee93be5b472a0 Dec 04 15:21:53 crc kubenswrapper[4946]: I1204 15:21:53.230772 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-h9fbt"] Dec 04 15:21:53 crc kubenswrapper[4946]: I1204 15:21:53.463380 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-c5fz8" event={"ID":"31b254b2-a6bc-4ee5-aa20-7dc8f577b114","Type":"ContainerStarted","Data":"66431a434ba417b8721060101c49ae976f3d937959e258c073dfc35b179fca3b"} Dec 04 15:21:53 crc kubenswrapper[4946]: I1204 15:21:53.474003 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-h9fbt" event={"ID":"3c111666-b29d-4cfa-988b-f216dae8a8b0","Type":"ContainerStarted","Data":"5833bcfb9098f1bdb8a30eb44681edf17e4496bb512acfe916aee93be5b472a0"} Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.450407 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-c5fz8"] Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.477138 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-jbxrv"] Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.481661 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.499535 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-jbxrv"] Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.569063 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de110676-babd-433c-b63e-cd66dc1bd512-config\") pod \"dnsmasq-dns-666b6646f7-jbxrv\" (UID: \"de110676-babd-433c-b63e-cd66dc1bd512\") " pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.569188 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de110676-babd-433c-b63e-cd66dc1bd512-dns-svc\") pod \"dnsmasq-dns-666b6646f7-jbxrv\" (UID: \"de110676-babd-433c-b63e-cd66dc1bd512\") " pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.569209 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82sp6\" (UniqueName: \"kubernetes.io/projected/de110676-babd-433c-b63e-cd66dc1bd512-kube-api-access-82sp6\") pod \"dnsmasq-dns-666b6646f7-jbxrv\" (UID: \"de110676-babd-433c-b63e-cd66dc1bd512\") " pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.673094 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de110676-babd-433c-b63e-cd66dc1bd512-dns-svc\") pod \"dnsmasq-dns-666b6646f7-jbxrv\" (UID: \"de110676-babd-433c-b63e-cd66dc1bd512\") " pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.673306 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82sp6\" (UniqueName: \"kubernetes.io/projected/de110676-babd-433c-b63e-cd66dc1bd512-kube-api-access-82sp6\") pod \"dnsmasq-dns-666b6646f7-jbxrv\" (UID: \"de110676-babd-433c-b63e-cd66dc1bd512\") " pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.673475 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de110676-babd-433c-b63e-cd66dc1bd512-config\") pod \"dnsmasq-dns-666b6646f7-jbxrv\" (UID: \"de110676-babd-433c-b63e-cd66dc1bd512\") " pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.674316 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de110676-babd-433c-b63e-cd66dc1bd512-dns-svc\") pod \"dnsmasq-dns-666b6646f7-jbxrv\" (UID: \"de110676-babd-433c-b63e-cd66dc1bd512\") " pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.674582 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de110676-babd-433c-b63e-cd66dc1bd512-config\") pod \"dnsmasq-dns-666b6646f7-jbxrv\" (UID: \"de110676-babd-433c-b63e-cd66dc1bd512\") " pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.727408 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82sp6\" (UniqueName: 
\"kubernetes.io/projected/de110676-babd-433c-b63e-cd66dc1bd512-kube-api-access-82sp6\") pod \"dnsmasq-dns-666b6646f7-jbxrv\" (UID: \"de110676-babd-433c-b63e-cd66dc1bd512\") " pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.810590 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.825386 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-h9fbt"] Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.859440 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-tg6rs"] Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.861929 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.881124 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-tg6rs"] Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.981677 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-td6qb\" (UniqueName: \"kubernetes.io/projected/bc305935-18fd-43f0-b1de-b588b49ea299-kube-api-access-td6qb\") pod \"dnsmasq-dns-57d769cc4f-tg6rs\" (UID: \"bc305935-18fd-43f0-b1de-b588b49ea299\") " pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.981767 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc305935-18fd-43f0-b1de-b588b49ea299-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-tg6rs\" (UID: \"bc305935-18fd-43f0-b1de-b588b49ea299\") " pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" Dec 04 15:21:55 crc kubenswrapper[4946]: I1204 15:21:55.981812 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc305935-18fd-43f0-b1de-b588b49ea299-config\") pod \"dnsmasq-dns-57d769cc4f-tg6rs\" (UID: \"bc305935-18fd-43f0-b1de-b588b49ea299\") " pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.093752 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-td6qb\" (UniqueName: \"kubernetes.io/projected/bc305935-18fd-43f0-b1de-b588b49ea299-kube-api-access-td6qb\") pod \"dnsmasq-dns-57d769cc4f-tg6rs\" (UID: \"bc305935-18fd-43f0-b1de-b588b49ea299\") " pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.093800 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc305935-18fd-43f0-b1de-b588b49ea299-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-tg6rs\" (UID: \"bc305935-18fd-43f0-b1de-b588b49ea299\") " pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.093851 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc305935-18fd-43f0-b1de-b588b49ea299-config\") pod \"dnsmasq-dns-57d769cc4f-tg6rs\" (UID: \"bc305935-18fd-43f0-b1de-b588b49ea299\") " pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.094768 4946 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc305935-18fd-43f0-b1de-b588b49ea299-config\") pod \"dnsmasq-dns-57d769cc4f-tg6rs\" (UID: \"bc305935-18fd-43f0-b1de-b588b49ea299\") " pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.095634 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc305935-18fd-43f0-b1de-b588b49ea299-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-tg6rs\" (UID: \"bc305935-18fd-43f0-b1de-b588b49ea299\") " pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.144165 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-td6qb\" (UniqueName: \"kubernetes.io/projected/bc305935-18fd-43f0-b1de-b588b49ea299-kube-api-access-td6qb\") pod \"dnsmasq-dns-57d769cc4f-tg6rs\" (UID: \"bc305935-18fd-43f0-b1de-b588b49ea299\") " pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.200746 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.641234 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.644031 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.646281 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.646947 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.647667 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.647859 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.647943 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.648290 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.648314 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-9dtg8" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.663228 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.673931 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-jbxrv"] Dec 04 15:21:56 crc kubenswrapper[4946]: W1204 15:21:56.691409 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde110676_babd_433c_b63e_cd66dc1bd512.slice/crio-b96ed6ca9c36e1b9c853a9027cb739cdd81e548a30e7dde1bfc48d692bc92965 WatchSource:0}: Error finding container b96ed6ca9c36e1b9c853a9027cb739cdd81e548a30e7dde1bfc48d692bc92965: Status 404 returned error can't find the container with id b96ed6ca9c36e1b9c853a9027cb739cdd81e548a30e7dde1bfc48d692bc92965 
Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.753588 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8510755d-2baa-400e-9d96-253271d5105a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8510755d-2baa-400e-9d96-253271d5105a\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.753685 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/920eb4d8-3aa1-4141-9f65-647e275405e4-config-data\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.753711 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/920eb4d8-3aa1-4141-9f65-647e275405e4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.755380 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.755415 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.755469 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xl67j\" (UniqueName: \"kubernetes.io/projected/920eb4d8-3aa1-4141-9f65-647e275405e4-kube-api-access-xl67j\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.755501 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/920eb4d8-3aa1-4141-9f65-647e275405e4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.755524 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/920eb4d8-3aa1-4141-9f65-647e275405e4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.755578 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/920eb4d8-3aa1-4141-9f65-647e275405e4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc 
kubenswrapper[4946]: I1204 15:21:56.755620 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.755666 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.823500 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-tg6rs"] Dec 04 15:21:56 crc kubenswrapper[4946]: W1204 15:21:56.833743 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc305935_18fd_43f0_b1de_b588b49ea299.slice/crio-4373e404a072f5520c12b9873f895cc8c3cf830bdf85fe14aa884d0276e0d1ec WatchSource:0}: Error finding container 4373e404a072f5520c12b9873f895cc8c3cf830bdf85fe14aa884d0276e0d1ec: Status 404 returned error can't find the container with id 4373e404a072f5520c12b9873f895cc8c3cf830bdf85fe14aa884d0276e0d1ec Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.858043 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xl67j\" (UniqueName: \"kubernetes.io/projected/920eb4d8-3aa1-4141-9f65-647e275405e4-kube-api-access-xl67j\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.858597 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/920eb4d8-3aa1-4141-9f65-647e275405e4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.858624 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/920eb4d8-3aa1-4141-9f65-647e275405e4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.859962 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/920eb4d8-3aa1-4141-9f65-647e275405e4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.860308 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/920eb4d8-3aa1-4141-9f65-647e275405e4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.860446 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-tls\") pod 
\"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.860477 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.860587 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8510755d-2baa-400e-9d96-253271d5105a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8510755d-2baa-400e-9d96-253271d5105a\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.861184 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/920eb4d8-3aa1-4141-9f65-647e275405e4-config-data\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.861204 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/920eb4d8-3aa1-4141-9f65-647e275405e4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.861254 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.861271 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.861626 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.861617 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.862564 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/920eb4d8-3aa1-4141-9f65-647e275405e4-config-data\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.862909 4946 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/920eb4d8-3aa1-4141-9f65-647e275405e4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.865922 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/920eb4d8-3aa1-4141-9f65-647e275405e4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.866924 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.869140 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.869198 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.869228 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8510755d-2baa-400e-9d96-253271d5105a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8510755d-2baa-400e-9d96-253271d5105a\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/8ca06f9d5466f4e0c9ad816f0448639905c6d9d82260c4835e3d42492d756057/globalmount\"" pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.871004 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/920eb4d8-3aa1-4141-9f65-647e275405e4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.877979 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xl67j\" (UniqueName: \"kubernetes.io/projected/920eb4d8-3aa1-4141-9f65-647e275405e4-kube-api-access-xl67j\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.911069 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8510755d-2baa-400e-9d96-253271d5105a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8510755d-2baa-400e-9d96-253271d5105a\") pod \"rabbitmq-server-0\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " pod="openstack/rabbitmq-server-0" Dec 04 15:21:56 crc kubenswrapper[4946]: I1204 15:21:56.986500 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.079164 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.084840 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.091445 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.091760 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.091785 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.091910 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.092088 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.092881 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.097591 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.097714 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-dn75w" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.268212 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.268325 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.268361 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxdz4\" (UniqueName: \"kubernetes.io/projected/f65583d1-046b-463a-9101-2074072a94f0-kube-api-access-kxdz4\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.268432 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f65583d1-046b-463a-9101-2074072a94f0-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.268552 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f65583d1-046b-463a-9101-2074072a94f0-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.268617 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.268724 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f65583d1-046b-463a-9101-2074072a94f0-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.268765 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.268861 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f65583d1-046b-463a-9101-2074072a94f0-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.268913 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f65583d1-046b-463a-9101-2074072a94f0-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.268979 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.372046 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.372209 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.372244 4946 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.372271 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxdz4\" (UniqueName: \"kubernetes.io/projected/f65583d1-046b-463a-9101-2074072a94f0-kube-api-access-kxdz4\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.372298 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f65583d1-046b-463a-9101-2074072a94f0-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.372334 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f65583d1-046b-463a-9101-2074072a94f0-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.372354 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.372378 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f65583d1-046b-463a-9101-2074072a94f0-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.372404 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.372435 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f65583d1-046b-463a-9101-2074072a94f0-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.372461 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f65583d1-046b-463a-9101-2074072a94f0-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.373572 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/f65583d1-046b-463a-9101-2074072a94f0-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.373924 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.374024 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f65583d1-046b-463a-9101-2074072a94f0-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.374370 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.374596 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f65583d1-046b-463a-9101-2074072a94f0-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.381413 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f65583d1-046b-463a-9101-2074072a94f0-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.381521 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.387988 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.395555 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f65583d1-046b-463a-9101-2074072a94f0-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.395993 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.396026 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6ef0e6db955580b30200c839876ec91ed17a86e82fc3a8ead692ab0769c689a7/globalmount\"" pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.400178 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxdz4\" (UniqueName: \"kubernetes.io/projected/f65583d1-046b-463a-9101-2074072a94f0-kube-api-access-kxdz4\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.457289 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\") pod \"rabbitmq-cell1-server-0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.559107 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.563606 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" event={"ID":"bc305935-18fd-43f0-b1de-b588b49ea299","Type":"ContainerStarted","Data":"4373e404a072f5520c12b9873f895cc8c3cf830bdf85fe14aa884d0276e0d1ec"}
Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.566485 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" event={"ID":"de110676-babd-433c-b63e-cd66dc1bd512","Type":"ContainerStarted","Data":"b96ed6ca9c36e1b9c853a9027cb739cdd81e548a30e7dde1bfc48d692bc92965"}
Dec 04 15:21:57 crc kubenswrapper[4946]: W1204 15:21:57.572437 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod920eb4d8_3aa1_4141_9f65_647e275405e4.slice/crio-270e8a017ba79e02cdf840f16e87e8cd6aa422619997e0f9261c49ed12f88f11 WatchSource:0}: Error finding container 270e8a017ba79e02cdf840f16e87e8cd6aa422619997e0f9261c49ed12f88f11: Status 404 returned error can't find the container with id 270e8a017ba79e02cdf840f16e87e8cd6aa422619997e0f9261c49ed12f88f11
Dec 04 15:21:57 crc kubenswrapper[4946]: I1204 15:21:57.740088 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.386773 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.428252 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.429485 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.432570 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-7kn64"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.432581 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.433732 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.433966 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.441073 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.455199 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.593961 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"920eb4d8-3aa1-4141-9f65-647e275405e4","Type":"ContainerStarted","Data":"270e8a017ba79e02cdf840f16e87e8cd6aa422619997e0f9261c49ed12f88f11"}
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.597396 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f65583d1-046b-463a-9101-2074072a94f0","Type":"ContainerStarted","Data":"d9d2d64d0095b5d509e0b3273ae122061e1b9b7af868b86d24864ef63a256202"}
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.609954 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bk5pp\" (UniqueName: \"kubernetes.io/projected/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-kube-api-access-bk5pp\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.610031 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.610068 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-kolla-config\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.610132 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-83c57560-1e35-4c1a-abe7-5aaa49d8cd6b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-83c57560-1e35-4c1a-abe7-5aaa49d8cd6b\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.610182 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.610252 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-operator-scripts\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.610339 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-config-data-generated\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.610375 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-config-data-default\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.711987 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-config-data-default\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.712109 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bk5pp\" (UniqueName: \"kubernetes.io/projected/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-kube-api-access-bk5pp\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.712178 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.712215 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-kolla-config\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.712246 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-83c57560-1e35-4c1a-abe7-5aaa49d8cd6b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-83c57560-1e35-4c1a-abe7-5aaa49d8cd6b\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.712272 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.712323 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-operator-scripts\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.712367 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-config-data-generated\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.712904 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-config-data-generated\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.713989 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-config-data-default\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.714353 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-kolla-config\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.723001 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-operator-scripts\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0"
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.724226 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.724269 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-83c57560-1e35-4c1a-abe7-5aaa49d8cd6b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-83c57560-1e35-4c1a-abe7-5aaa49d8cd6b\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/27a810b16599ecdc2da91edeed50913718a150033b6d0a6cf420becfd71faafb/globalmount\"" pod="openstack/openstack-galera-0" Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.731974 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0" Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.732935 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0" Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.757388 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bk5pp\" (UniqueName: \"kubernetes.io/projected/43d26c42-eba9-4e5c-bd2d-7cdf7074a176-kube-api-access-bk5pp\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0" Dec 04 15:21:58 crc kubenswrapper[4946]: I1204 15:21:58.837503 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-83c57560-1e35-4c1a-abe7-5aaa49d8cd6b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-83c57560-1e35-4c1a-abe7-5aaa49d8cd6b\") pod \"openstack-galera-0\" (UID: \"43d26c42-eba9-4e5c-bd2d-7cdf7074a176\") " pod="openstack/openstack-galera-0" Dec 04 15:21:59 crc kubenswrapper[4946]: I1204 15:21:59.055952 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 04 15:21:59 crc kubenswrapper[4946]: I1204 15:21:59.835381 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 04 15:21:59 crc kubenswrapper[4946]: I1204 15:21:59.839459 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 04 15:21:59 crc kubenswrapper[4946]: I1204 15:21:59.849692 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-896b8" Dec 04 15:21:59 crc kubenswrapper[4946]: I1204 15:21:59.850010 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 04 15:21:59 crc kubenswrapper[4946]: I1204 15:21:59.850487 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 04 15:21:59 crc kubenswrapper[4946]: I1204 15:21:59.850720 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 04 15:21:59 crc kubenswrapper[4946]: I1204 15:21:59.856167 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 04 15:21:59 crc kubenswrapper[4946]: I1204 15:21:59.941356 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/44a85e36-b029-4450-b8aa-11bf910d8139-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:21:59 crc kubenswrapper[4946]: I1204 15:21:59.941434 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b62c4baa-46d6-46d5-bcb0-8d7b5aa6f95a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b62c4baa-46d6-46d5-bcb0-8d7b5aa6f95a\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:21:59 crc kubenswrapper[4946]: I1204 15:21:59.941497 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/44a85e36-b029-4450-b8aa-11bf910d8139-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:21:59 crc kubenswrapper[4946]: I1204 15:21:59.941784 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44a85e36-b029-4450-b8aa-11bf910d8139-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:21:59 crc kubenswrapper[4946]: I1204 15:21:59.941834 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/44a85e36-b029-4450-b8aa-11bf910d8139-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:21:59 crc kubenswrapper[4946]: I1204 15:21:59.941880 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/44a85e36-b029-4450-b8aa-11bf910d8139-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:21:59 crc kubenswrapper[4946]: I1204 15:21:59.942149 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44a85e36-b029-4450-b8aa-11bf910d8139-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:21:59 crc kubenswrapper[4946]: I1204 15:21:59.942317 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jj6pl\" (UniqueName: \"kubernetes.io/projected/44a85e36-b029-4450-b8aa-11bf910d8139-kube-api-access-jj6pl\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.045061 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/44a85e36-b029-4450-b8aa-11bf910d8139-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.046620 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/44a85e36-b029-4450-b8aa-11bf910d8139-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.046732 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44a85e36-b029-4450-b8aa-11bf910d8139-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.046822 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jj6pl\" (UniqueName: \"kubernetes.io/projected/44a85e36-b029-4450-b8aa-11bf910d8139-kube-api-access-jj6pl\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.046974 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/44a85e36-b029-4450-b8aa-11bf910d8139-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.047026 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b62c4baa-46d6-46d5-bcb0-8d7b5aa6f95a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b62c4baa-46d6-46d5-bcb0-8d7b5aa6f95a\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.047110 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/44a85e36-b029-4450-b8aa-11bf910d8139-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.047198 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44a85e36-b029-4450-b8aa-11bf910d8139-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.050712 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44a85e36-b029-4450-b8aa-11bf910d8139-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.051661 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/44a85e36-b029-4450-b8aa-11bf910d8139-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.051951 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/44a85e36-b029-4450-b8aa-11bf910d8139-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.056217 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/44a85e36-b029-4450-b8aa-11bf910d8139-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.061931 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/44a85e36-b029-4450-b8aa-11bf910d8139-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.063070 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44a85e36-b029-4450-b8aa-11bf910d8139-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0" Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.063216 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.063290 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b62c4baa-46d6-46d5-bcb0-8d7b5aa6f95a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b62c4baa-46d6-46d5-bcb0-8d7b5aa6f95a\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/3ebd6728d2787bd7c98d959c052b1102a6440cd0530e254d67cb17c3b8d7dafb/globalmount\"" pod="openstack/openstack-cell1-galera-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.079844 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jj6pl\" (UniqueName: \"kubernetes.io/projected/44a85e36-b029-4450-b8aa-11bf910d8139-kube-api-access-jj6pl\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.132110 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b62c4baa-46d6-46d5-bcb0-8d7b5aa6f95a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b62c4baa-46d6-46d5-bcb0-8d7b5aa6f95a\") pod \"openstack-cell1-galera-0\" (UID: \"44a85e36-b029-4450-b8aa-11bf910d8139\") " pod="openstack/openstack-cell1-galera-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.171107 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"]
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.174381 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.177545 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.178624 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.187443 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-kxmz7"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.190378 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.209291 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.251602 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/74261534-d493-4bb6-ac4f-e7196daaa71f-memcached-tls-certs\") pod \"memcached-0\" (UID: \"74261534-d493-4bb6-ac4f-e7196daaa71f\") " pod="openstack/memcached-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.251698 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqtmk\" (UniqueName: \"kubernetes.io/projected/74261534-d493-4bb6-ac4f-e7196daaa71f-kube-api-access-hqtmk\") pod \"memcached-0\" (UID: \"74261534-d493-4bb6-ac4f-e7196daaa71f\") " pod="openstack/memcached-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.251728 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74261534-d493-4bb6-ac4f-e7196daaa71f-combined-ca-bundle\") pod \"memcached-0\" (UID: \"74261534-d493-4bb6-ac4f-e7196daaa71f\") " pod="openstack/memcached-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.251766 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/74261534-d493-4bb6-ac4f-e7196daaa71f-config-data\") pod \"memcached-0\" (UID: \"74261534-d493-4bb6-ac4f-e7196daaa71f\") " pod="openstack/memcached-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.251827 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/74261534-d493-4bb6-ac4f-e7196daaa71f-kolla-config\") pod \"memcached-0\" (UID: \"74261534-d493-4bb6-ac4f-e7196daaa71f\") " pod="openstack/memcached-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.353943 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/74261534-d493-4bb6-ac4f-e7196daaa71f-memcached-tls-certs\") pod \"memcached-0\" (UID: \"74261534-d493-4bb6-ac4f-e7196daaa71f\") " pod="openstack/memcached-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.354042 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqtmk\" (UniqueName: \"kubernetes.io/projected/74261534-d493-4bb6-ac4f-e7196daaa71f-kube-api-access-hqtmk\") pod \"memcached-0\" (UID: \"74261534-d493-4bb6-ac4f-e7196daaa71f\") " pod="openstack/memcached-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.354064 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74261534-d493-4bb6-ac4f-e7196daaa71f-combined-ca-bundle\") pod \"memcached-0\" (UID: \"74261534-d493-4bb6-ac4f-e7196daaa71f\") " pod="openstack/memcached-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.354105 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/74261534-d493-4bb6-ac4f-e7196daaa71f-config-data\") pod \"memcached-0\" (UID: \"74261534-d493-4bb6-ac4f-e7196daaa71f\") " pod="openstack/memcached-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.354256 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/74261534-d493-4bb6-ac4f-e7196daaa71f-kolla-config\") pod \"memcached-0\" (UID: \"74261534-d493-4bb6-ac4f-e7196daaa71f\") " pod="openstack/memcached-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.355396 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/74261534-d493-4bb6-ac4f-e7196daaa71f-config-data\") pod \"memcached-0\" (UID: \"74261534-d493-4bb6-ac4f-e7196daaa71f\") " pod="openstack/memcached-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.355425 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/74261534-d493-4bb6-ac4f-e7196daaa71f-kolla-config\") pod \"memcached-0\" (UID: \"74261534-d493-4bb6-ac4f-e7196daaa71f\") " pod="openstack/memcached-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.365893 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74261534-d493-4bb6-ac4f-e7196daaa71f-combined-ca-bundle\") pod \"memcached-0\" (UID: \"74261534-d493-4bb6-ac4f-e7196daaa71f\") " pod="openstack/memcached-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.366500 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/74261534-d493-4bb6-ac4f-e7196daaa71f-memcached-tls-certs\") pod \"memcached-0\" (UID: \"74261534-d493-4bb6-ac4f-e7196daaa71f\") " pod="openstack/memcached-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.390392 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqtmk\" (UniqueName: \"kubernetes.io/projected/74261534-d493-4bb6-ac4f-e7196daaa71f-kube-api-access-hqtmk\") pod \"memcached-0\" (UID: \"74261534-d493-4bb6-ac4f-e7196daaa71f\") " pod="openstack/memcached-0"
Dec 04 15:22:00 crc kubenswrapper[4946]: I1204 15:22:00.519894 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.007554 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.009663 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.016631 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-n6w5n"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.044940 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.115945 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssfmc\" (UniqueName: \"kubernetes.io/projected/f81b9295-1cdf-44a6-afef-1380c1e3cf54-kube-api-access-ssfmc\") pod \"kube-state-metrics-0\" (UID: \"f81b9295-1cdf-44a6-afef-1380c1e3cf54\") " pod="openstack/kube-state-metrics-0"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.217715 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssfmc\" (UniqueName: \"kubernetes.io/projected/f81b9295-1cdf-44a6-afef-1380c1e3cf54-kube-api-access-ssfmc\") pod \"kube-state-metrics-0\" (UID: \"f81b9295-1cdf-44a6-afef-1380c1e3cf54\") " pod="openstack/kube-state-metrics-0"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.281408 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssfmc\" (UniqueName: \"kubernetes.io/projected/f81b9295-1cdf-44a6-afef-1380c1e3cf54-kube-api-access-ssfmc\") pod \"kube-state-metrics-0\" (UID: \"f81b9295-1cdf-44a6-afef-1380c1e3cf54\") " pod="openstack/kube-state-metrics-0"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.337680 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.757551 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.760474 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.766013 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.766358 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.766561 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.766638 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.766555 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-xlg57"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.778934 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.930623 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kz7r\" (UniqueName: \"kubernetes.io/projected/a304ef91-9673-43d6-8b91-0ba511961217-kube-api-access-8kz7r\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.930874 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/a304ef91-9673-43d6-8b91-0ba511961217-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.931010 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a304ef91-9673-43d6-8b91-0ba511961217-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.931176 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a304ef91-9673-43d6-8b91-0ba511961217-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.931277 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/a304ef91-9673-43d6-8b91-0ba511961217-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.931312 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/a304ef91-9673-43d6-8b91-0ba511961217-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:02 crc kubenswrapper[4946]: I1204 15:22:02.931391 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a304ef91-9673-43d6-8b91-0ba511961217-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.033275 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/a304ef91-9673-43d6-8b91-0ba511961217-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.033346 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a304ef91-9673-43d6-8b91-0ba511961217-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.033380 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a304ef91-9673-43d6-8b91-0ba511961217-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.033408 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/a304ef91-9673-43d6-8b91-0ba511961217-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.033427 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/a304ef91-9673-43d6-8b91-0ba511961217-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.033453 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a304ef91-9673-43d6-8b91-0ba511961217-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.033501 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kz7r\" (UniqueName: \"kubernetes.io/projected/a304ef91-9673-43d6-8b91-0ba511961217-kube-api-access-8kz7r\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.034546 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/a304ef91-9673-43d6-8b91-0ba511961217-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.040419 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a304ef91-9673-43d6-8b91-0ba511961217-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.041441 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a304ef91-9673-43d6-8b91-0ba511961217-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.042094 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a304ef91-9673-43d6-8b91-0ba511961217-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.042427 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/a304ef91-9673-43d6-8b91-0ba511961217-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.042641 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/a304ef91-9673-43d6-8b91-0ba511961217-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.061681 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kz7r\" (UniqueName: \"kubernetes.io/projected/a304ef91-9673-43d6-8b91-0ba511961217-kube-api-access-8kz7r\") pod \"alertmanager-metric-storage-0\" (UID: \"a304ef91-9673-43d6-8b91-0ba511961217\") " pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.088326 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.443866 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.446644 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.451868 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.452064 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-rvtnm"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.452188 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.452368 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.452397 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.452499 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.506191 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.546718 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.546794 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f4481828-f464-47c6-a803-0c1962101efa-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.546883 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f4481828-f464-47c6-a803-0c1962101efa-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.546914 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f4481828-f464-47c6-a803-0c1962101efa-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.546965 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f4481828-f464-47c6-a803-0c1962101efa-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.547011 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f4481828-f464-47c6-a803-0c1962101efa-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.547043 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lcmg\" (UniqueName: \"kubernetes.io/projected/f4481828-f464-47c6-a803-0c1962101efa-kube-api-access-5lcmg\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.547135 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f4481828-f464-47c6-a803-0c1962101efa-config\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.649218 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f4481828-f464-47c6-a803-0c1962101efa-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.649278 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f4481828-f464-47c6-a803-0c1962101efa-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.649317 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f4481828-f464-47c6-a803-0c1962101efa-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.649357 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f4481828-f464-47c6-a803-0c1962101efa-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.649393 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lcmg\" (UniqueName: \"kubernetes.io/projected/f4481828-f464-47c6-a803-0c1962101efa-kube-api-access-5lcmg\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.649446 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f4481828-f464-47c6-a803-0c1962101efa-config\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.649516 4946 reconciler_common.go:218]
"operationExecutor.MountVolume started for volume \"pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.649545 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f4481828-f464-47c6-a803-0c1962101efa-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.650557 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f4481828-f464-47c6-a803-0c1962101efa-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.656771 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f4481828-f464-47c6-a803-0c1962101efa-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.659149 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f4481828-f464-47c6-a803-0c1962101efa-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.659569 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f4481828-f464-47c6-a803-0c1962101efa-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.670784 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f4481828-f464-47c6-a803-0c1962101efa-config\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.674277 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f4481828-f464-47c6-a803-0c1962101efa-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.680817 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.681044 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lcmg\" (UniqueName: \"kubernetes.io/projected/f4481828-f464-47c6-a803-0c1962101efa-kube-api-access-5lcmg\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.681168 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/68982753889d34acaa444cdcec2be2c562e2d871e8fc82af199886676f0b8e03/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.726395 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\") pod \"prometheus-metric-storage-0\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:03 crc kubenswrapper[4946]: I1204 15:22:03.814798 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 04 15:22:06 crc kubenswrapper[4946]: I1204 15:22:06.130580 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Dec 04 15:22:06 crc kubenswrapper[4946]: I1204 15:22:06.135659 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:06 crc kubenswrapper[4946]: I1204 15:22:06.138961 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-rrxsh"
Dec 04 15:22:06 crc kubenswrapper[4946]: I1204 15:22:06.140607 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Dec 04 15:22:06 crc kubenswrapper[4946]: I1204 15:22:06.149247 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics"
Dec 04 15:22:06 crc kubenswrapper[4946]: I1204 15:22:06.149528 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Dec 04 15:22:06 crc kubenswrapper[4946]: I1204 15:22:06.150104 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs"
Dec 04 15:22:06 crc kubenswrapper[4946]: I1204 15:22:06.154160 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Dec 04 15:22:06 crc kubenswrapper[4946]: I1204 15:22:06.206788 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6906a68-0819-41bc-a3d8-2ac76e77b67f-config\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:06 crc kubenswrapper[4946]: I1204 15:22:06.206894 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c6906a68-0819-41bc-a3d8-2ac76e77b67f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:06 crc kubenswrapper[4946]: I1204 15:22:06.206951 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6906a68-0819-41bc-a3d8-2ac76e77b67f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:06 crc kubenswrapper[4946]: I1204 15:22:06.206993 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsgq9\" (UniqueName: \"kubernetes.io/projected/c6906a68-0819-41bc-a3d8-2ac76e77b67f-kube-api-access-vsgq9\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:06 crc kubenswrapper[4946]: I1204 15:22:06.207065 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6906a68-0819-41bc-a3d8-2ac76e77b67f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:06 crc kubenswrapper[4946]: I1204 15:22:06.207341 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c6906a68-0819-41bc-a3d8-2ac76e77b67f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:06 crc kubenswrapper[4946]: I1204 15:22:06.207671 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6906a68-0819-41bc-a3d8-2ac76e77b67f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:06 crc kubenswrapper[4946]: I1204 15:22:06.207778 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4e1cf797-014e-45c5-8f93-3c26c86cd41b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4e1cf797-014e-45c5-8f93-3c26c86cd41b\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.311202 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6906a68-0819-41bc-a3d8-2ac76e77b67f-config\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.310056 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6906a68-0819-41bc-a3d8-2ac76e77b67f-config\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.312189 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c6906a68-0819-41bc-a3d8-2ac76e77b67f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.312350 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6906a68-0819-41bc-a3d8-2ac76e77b67f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.312407 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsgq9\" (UniqueName: \"kubernetes.io/projected/c6906a68-0819-41bc-a3d8-2ac76e77b67f-kube-api-access-vsgq9\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.312452 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6906a68-0819-41bc-a3d8-2ac76e77b67f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.312512 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c6906a68-0819-41bc-a3d8-2ac76e77b67f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.312578 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6906a68-0819-41bc-a3d8-2ac76e77b67f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.312659 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4e1cf797-014e-45c5-8f93-3c26c86cd41b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4e1cf797-014e-45c5-8f93-3c26c86cd41b\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.312731 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c6906a68-0819-41bc-a3d8-2ac76e77b67f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.315610 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c6906a68-0819-41bc-a3d8-2ac76e77b67f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.318567 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.318627 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4e1cf797-014e-45c5-8f93-3c26c86cd41b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4e1cf797-014e-45c5-8f93-3c26c86cd41b\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/3be24790ccf7126c4c4ab7b429581cc68b4b07a78fca7abd357740df1a501d87/globalmount\"" pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.322351 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6906a68-0819-41bc-a3d8-2ac76e77b67f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.322377 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6906a68-0819-41bc-a3d8-2ac76e77b67f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.322904 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6906a68-0819-41bc-a3d8-2ac76e77b67f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.334160 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsgq9\" (UniqueName: \"kubernetes.io/projected/c6906a68-0819-41bc-a3d8-2ac76e77b67f-kube-api-access-vsgq9\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.354442 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4e1cf797-014e-45c5-8f93-3c26c86cd41b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4e1cf797-014e-45c5-8f93-3c26c86cd41b\") pod \"ovsdbserver-nb-0\" (UID: \"c6906a68-0819-41bc-a3d8-2ac76e77b67f\") " pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:06.489999 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.101762 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-qv4hw"]
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.105068 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.107631 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-fdvcl"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.107884 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.109102 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.120830 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-hc6tt"]
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.123912 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.137722 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-qv4hw"]
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.166161 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-hc6tt"]
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.234373 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64k8z\" (UniqueName: \"kubernetes.io/projected/2734e466-178a-4344-bfac-9adb5e4492a7-kube-api-access-64k8z\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.234458 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/2734e466-178a-4344-bfac-9adb5e4492a7-ovn-controller-tls-certs\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.234510 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2734e466-178a-4344-bfac-9adb5e4492a7-var-log-ovn\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.234698 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2734e466-178a-4344-bfac-9adb5e4492a7-var-run\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.234800 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gzbs\" (UniqueName: \"kubernetes.io/projected/9c369924-f384-4ca1-b3ac-e1b334790f15-kube-api-access-7gzbs\") pod \"ovn-controller-ovs-hc6tt\" (UID: \"9c369924-f384-4ca1-b3ac-e1b334790f15\") " pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.234842 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9c369924-f384-4ca1-b3ac-e1b334790f15-scripts\") pod \"ovn-controller-ovs-hc6tt\" (UID: \"9c369924-f384-4ca1-b3ac-e1b334790f15\") " pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.234973 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2734e466-178a-4344-bfac-9adb5e4492a7-scripts\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.235025 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9c369924-f384-4ca1-b3ac-e1b334790f15-var-run\") pod \"ovn-controller-ovs-hc6tt\" (UID: \"9c369924-f384-4ca1-b3ac-e1b334790f15\") " pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.235046 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2734e466-178a-4344-bfac-9adb5e4492a7-var-run-ovn\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.235062 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2734e466-178a-4344-bfac-9adb5e4492a7-combined-ca-bundle\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.235095 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/9c369924-f384-4ca1-b3ac-e1b334790f15-var-log\") pod \"ovn-controller-ovs-hc6tt\" (UID: \"9c369924-f384-4ca1-b3ac-e1b334790f15\") " pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.235172 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/9c369924-f384-4ca1-b3ac-e1b334790f15-etc-ovs\") pod \"ovn-controller-ovs-hc6tt\" (UID: \"9c369924-f384-4ca1-b3ac-e1b334790f15\") " pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.235245 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/9c369924-f384-4ca1-b3ac-e1b334790f15-var-lib\") pod \"ovn-controller-ovs-hc6tt\" (UID: \"9c369924-f384-4ca1-b3ac-e1b334790f15\") " pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.337152 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2734e466-178a-4344-bfac-9adb5e4492a7-var-run\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.337206 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gzbs\" (UniqueName: \"kubernetes.io/projected/9c369924-f384-4ca1-b3ac-e1b334790f15-kube-api-access-7gzbs\") pod \"ovn-controller-ovs-hc6tt\" (UID: \"9c369924-f384-4ca1-b3ac-e1b334790f15\") " pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.337228 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9c369924-f384-4ca1-b3ac-e1b334790f15-scripts\") pod \"ovn-controller-ovs-hc6tt\" (UID: \"9c369924-f384-4ca1-b3ac-e1b334790f15\") " pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.337273 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2734e466-178a-4344-bfac-9adb5e4492a7-scripts\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.337308 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9c369924-f384-4ca1-b3ac-e1b334790f15-var-run\") pod \"ovn-controller-ovs-hc6tt\" (UID: \"9c369924-f384-4ca1-b3ac-e1b334790f15\") " pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.337326 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2734e466-178a-4344-bfac-9adb5e4492a7-var-run-ovn\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.337346 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2734e466-178a-4344-bfac-9adb5e4492a7-combined-ca-bundle\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.337389 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/9c369924-f384-4ca1-b3ac-e1b334790f15-var-log\") pod \"ovn-controller-ovs-hc6tt\" (UID: \"9c369924-f384-4ca1-b3ac-e1b334790f15\") " pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.337410 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/9c369924-f384-4ca1-b3ac-e1b334790f15-etc-ovs\") pod \"ovn-controller-ovs-hc6tt\" (UID: \"9c369924-f384-4ca1-b3ac-e1b334790f15\") " pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.337450 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/9c369924-f384-4ca1-b3ac-e1b334790f15-var-lib\") pod \"ovn-controller-ovs-hc6tt\" (UID: \"9c369924-f384-4ca1-b3ac-e1b334790f15\") " pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.337483 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64k8z\" (UniqueName: \"kubernetes.io/projected/2734e466-178a-4344-bfac-9adb5e4492a7-kube-api-access-64k8z\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.337504 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/2734e466-178a-4344-bfac-9adb5e4492a7-ovn-controller-tls-certs\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.337527 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2734e466-178a-4344-bfac-9adb5e4492a7-var-log-ovn\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.338646 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2734e466-178a-4344-bfac-9adb5e4492a7-var-log-ovn\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.338953 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2734e466-178a-4344-bfac-9adb5e4492a7-var-run\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.339142 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/9c369924-f384-4ca1-b3ac-e1b334790f15-var-log\") pod \"ovn-controller-ovs-hc6tt\" (UID: \"9c369924-f384-4ca1-b3ac-e1b334790f15\") " pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.339340 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/9c369924-f384-4ca1-b3ac-e1b334790f15-var-lib\") pod \"ovn-controller-ovs-hc6tt\" (UID: \"9c369924-f384-4ca1-b3ac-e1b334790f15\") " pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.339668 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/9c369924-f384-4ca1-b3ac-e1b334790f15-etc-ovs\") pod \"ovn-controller-ovs-hc6tt\" (UID: \"9c369924-f384-4ca1-b3ac-e1b334790f15\") " pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.340167 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9c369924-f384-4ca1-b3ac-e1b334790f15-var-run\") pod \"ovn-controller-ovs-hc6tt\" (UID: \"9c369924-f384-4ca1-b3ac-e1b334790f15\") " pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.340279 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2734e466-178a-4344-bfac-9adb5e4492a7-var-run-ovn\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.342843 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9c369924-f384-4ca1-b3ac-e1b334790f15-scripts\") pod \"ovn-controller-ovs-hc6tt\" (UID: \"9c369924-f384-4ca1-b3ac-e1b334790f15\") " pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.345132 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2734e466-178a-4344-bfac-9adb5e4492a7-scripts\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.347487 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2734e466-178a-4344-bfac-9adb5e4492a7-combined-ca-bundle\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.360336 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/2734e466-178a-4344-bfac-9adb5e4492a7-ovn-controller-tls-certs\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.369301 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64k8z\" (UniqueName: \"kubernetes.io/projected/2734e466-178a-4344-bfac-9adb5e4492a7-kube-api-access-64k8z\") pod \"ovn-controller-qv4hw\" (UID: \"2734e466-178a-4344-bfac-9adb5e4492a7\") " pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.377192 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gzbs\" (UniqueName: \"kubernetes.io/projected/9c369924-f384-4ca1-b3ac-e1b334790f15-kube-api-access-7gzbs\") pod \"ovn-controller-ovs-hc6tt\" (UID: \"9c369924-f384-4ca1-b3ac-e1b334790f15\") " pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.436470 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-qv4hw"
Dec 04 15:22:07 crc kubenswrapper[4946]: I1204 15:22:07.462269 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-hc6tt"
Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.369634 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.383363 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-mmxxx" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.383692 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.390555 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.391008 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.393697 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.503214 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f89f1623-6a48-4db4-8059-940887046c8e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.503294 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f89f1623-6a48-4db4-8059-940887046c8e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.503621 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f89f1623-6a48-4db4-8059-940887046c8e-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.503684 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f89f1623-6a48-4db4-8059-940887046c8e-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.503737 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f89f1623-6a48-4db4-8059-940887046c8e-config\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.503864 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f89f1623-6a48-4db4-8059-940887046c8e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.503898 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b397d85c-4ec2-47eb-95ab-b2876e165d79\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b397d85c-4ec2-47eb-95ab-b2876e165d79\") pod 
\"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.503969 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rngw6\" (UniqueName: \"kubernetes.io/projected/f89f1623-6a48-4db4-8059-940887046c8e-kube-api-access-rngw6\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.611899 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f89f1623-6a48-4db4-8059-940887046c8e-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.611976 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f89f1623-6a48-4db4-8059-940887046c8e-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.612021 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f89f1623-6a48-4db4-8059-940887046c8e-config\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.613965 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f89f1623-6a48-4db4-8059-940887046c8e-config\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.612911 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f89f1623-6a48-4db4-8059-940887046c8e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.614081 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b397d85c-4ec2-47eb-95ab-b2876e165d79\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b397d85c-4ec2-47eb-95ab-b2876e165d79\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.614533 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f89f1623-6a48-4db4-8059-940887046c8e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.615044 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rngw6\" (UniqueName: \"kubernetes.io/projected/f89f1623-6a48-4db4-8059-940887046c8e-kube-api-access-rngw6\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.615280 4946 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f89f1623-6a48-4db4-8059-940887046c8e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.615790 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f89f1623-6a48-4db4-8059-940887046c8e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.616416 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f89f1623-6a48-4db4-8059-940887046c8e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.621833 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f89f1623-6a48-4db4-8059-940887046c8e-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.622653 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.622691 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b397d85c-4ec2-47eb-95ab-b2876e165d79\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b397d85c-4ec2-47eb-95ab-b2876e165d79\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2e5abd9c64751df610a5be15d527dcb2189bc7d7ad7f742f19d9bea5ecb5d997/globalmount\"" pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.629523 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f89f1623-6a48-4db4-8059-940887046c8e-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.646258 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f89f1623-6a48-4db4-8059-940887046c8e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.652318 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rngw6\" (UniqueName: \"kubernetes.io/projected/f89f1623-6a48-4db4-8059-940887046c8e-kube-api-access-rngw6\") pod \"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.693142 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b397d85c-4ec2-47eb-95ab-b2876e165d79\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b397d85c-4ec2-47eb-95ab-b2876e165d79\") pod 
\"ovsdbserver-sb-0\" (UID: \"f89f1623-6a48-4db4-8059-940887046c8e\") " pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:10 crc kubenswrapper[4946]: I1204 15:22:10.710262 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:14 crc kubenswrapper[4946]: I1204 15:22:14.805196 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c"] Dec 04 15:22:14 crc kubenswrapper[4946]: I1204 15:22:14.808274 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:14 crc kubenswrapper[4946]: I1204 15:22:14.813984 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-distributor-http" Dec 04 15:22:14 crc kubenswrapper[4946]: I1204 15:22:14.820242 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-config" Dec 04 15:22:14 crc kubenswrapper[4946]: I1204 15:22:14.820516 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-distributor-grpc" Dec 04 15:22:14 crc kubenswrapper[4946]: I1204 15:22:14.820608 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-ca-bundle" Dec 04 15:22:14 crc kubenswrapper[4946]: I1204 15:22:14.820850 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-dockercfg-lm78x" Dec 04 15:22:14 crc kubenswrapper[4946]: I1204 15:22:14.832735 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c"] Dec 04 15:22:14 crc kubenswrapper[4946]: I1204 15:22:14.935887 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/b0adb62a-e125-4612-8e57-74bab154a2c4-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-664b687b54-76w8c\" (UID: \"b0adb62a-e125-4612-8e57-74bab154a2c4\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:14 crc kubenswrapper[4946]: I1204 15:22:14.936291 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0adb62a-e125-4612-8e57-74bab154a2c4-config\") pod \"cloudkitty-lokistack-distributor-664b687b54-76w8c\" (UID: \"b0adb62a-e125-4612-8e57-74bab154a2c4\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:14 crc kubenswrapper[4946]: I1204 15:22:14.936411 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdbhl\" (UniqueName: \"kubernetes.io/projected/b0adb62a-e125-4612-8e57-74bab154a2c4-kube-api-access-gdbhl\") pod \"cloudkitty-lokistack-distributor-664b687b54-76w8c\" (UID: \"b0adb62a-e125-4612-8e57-74bab154a2c4\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:14 crc kubenswrapper[4946]: I1204 15:22:14.936469 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b0adb62a-e125-4612-8e57-74bab154a2c4-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-664b687b54-76w8c\" (UID: \"b0adb62a-e125-4612-8e57-74bab154a2c4\") " 
pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:14 crc kubenswrapper[4946]: I1204 15:22:14.936495 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/b0adb62a-e125-4612-8e57-74bab154a2c4-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-664b687b54-76w8c\" (UID: \"b0adb62a-e125-4612-8e57-74bab154a2c4\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.007816 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.039336 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdbhl\" (UniqueName: \"kubernetes.io/projected/b0adb62a-e125-4612-8e57-74bab154a2c4-kube-api-access-gdbhl\") pod \"cloudkitty-lokistack-distributor-664b687b54-76w8c\" (UID: \"b0adb62a-e125-4612-8e57-74bab154a2c4\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.039426 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b0adb62a-e125-4612-8e57-74bab154a2c4-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-664b687b54-76w8c\" (UID: \"b0adb62a-e125-4612-8e57-74bab154a2c4\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.039461 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/b0adb62a-e125-4612-8e57-74bab154a2c4-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-664b687b54-76w8c\" (UID: \"b0adb62a-e125-4612-8e57-74bab154a2c4\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.039504 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/b0adb62a-e125-4612-8e57-74bab154a2c4-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-664b687b54-76w8c\" (UID: \"b0adb62a-e125-4612-8e57-74bab154a2c4\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.039553 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0adb62a-e125-4612-8e57-74bab154a2c4-config\") pod \"cloudkitty-lokistack-distributor-664b687b54-76w8c\" (UID: \"b0adb62a-e125-4612-8e57-74bab154a2c4\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.041052 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b0adb62a-e125-4612-8e57-74bab154a2c4-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-664b687b54-76w8c\" (UID: \"b0adb62a-e125-4612-8e57-74bab154a2c4\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.041258 4946 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0adb62a-e125-4612-8e57-74bab154a2c4-config\") pod \"cloudkitty-lokistack-distributor-664b687b54-76w8c\" (UID: \"b0adb62a-e125-4612-8e57-74bab154a2c4\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.041598 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq"] Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.043611 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.052837 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-querier-grpc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.053064 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-loki-s3" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.053174 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/b0adb62a-e125-4612-8e57-74bab154a2c4-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-664b687b54-76w8c\" (UID: \"b0adb62a-e125-4612-8e57-74bab154a2c4\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.053257 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-querier-http" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.063417 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdbhl\" (UniqueName: \"kubernetes.io/projected/b0adb62a-e125-4612-8e57-74bab154a2c4-kube-api-access-gdbhl\") pod \"cloudkitty-lokistack-distributor-664b687b54-76w8c\" (UID: \"b0adb62a-e125-4612-8e57-74bab154a2c4\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.068949 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq"] Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.077048 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/b0adb62a-e125-4612-8e57-74bab154a2c4-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-664b687b54-76w8c\" (UID: \"b0adb62a-e125-4612-8e57-74bab154a2c4\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.141665 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdnbt\" (UniqueName: \"kubernetes.io/projected/02db9740-8e77-440b-95f9-6a2968cd39fe-kube-api-access-fdnbt\") pod \"cloudkitty-lokistack-querier-5467947bf7-wwslq\" (UID: \"02db9740-8e77-440b-95f9-6a2968cd39fe\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.141782 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/02db9740-8e77-440b-95f9-6a2968cd39fe-cloudkitty-lokistack-querier-http\") pod 
\"cloudkitty-lokistack-querier-5467947bf7-wwslq\" (UID: \"02db9740-8e77-440b-95f9-6a2968cd39fe\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.141817 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/02db9740-8e77-440b-95f9-6a2968cd39fe-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-5467947bf7-wwslq\" (UID: \"02db9740-8e77-440b-95f9-6a2968cd39fe\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.141852 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/02db9740-8e77-440b-95f9-6a2968cd39fe-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-5467947bf7-wwslq\" (UID: \"02db9740-8e77-440b-95f9-6a2968cd39fe\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.141998 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/02db9740-8e77-440b-95f9-6a2968cd39fe-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-5467947bf7-wwslq\" (UID: \"02db9740-8e77-440b-95f9-6a2968cd39fe\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.142033 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/02db9740-8e77-440b-95f9-6a2968cd39fe-config\") pod \"cloudkitty-lokistack-querier-5467947bf7-wwslq\" (UID: \"02db9740-8e77-440b-95f9-6a2968cd39fe\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.155434 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.201310 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn"] Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.203008 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.210869 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-query-frontend-http" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.211911 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-query-frontend-grpc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.234229 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn"] Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.244539 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/02db9740-8e77-440b-95f9-6a2968cd39fe-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-5467947bf7-wwslq\" (UID: \"02db9740-8e77-440b-95f9-6a2968cd39fe\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.244617 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/02db9740-8e77-440b-95f9-6a2968cd39fe-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-5467947bf7-wwslq\" (UID: \"02db9740-8e77-440b-95f9-6a2968cd39fe\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.244681 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/02db9740-8e77-440b-95f9-6a2968cd39fe-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-5467947bf7-wwslq\" (UID: \"02db9740-8e77-440b-95f9-6a2968cd39fe\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.244722 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/02db9740-8e77-440b-95f9-6a2968cd39fe-config\") pod \"cloudkitty-lokistack-querier-5467947bf7-wwslq\" (UID: \"02db9740-8e77-440b-95f9-6a2968cd39fe\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.244760 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdnbt\" (UniqueName: \"kubernetes.io/projected/02db9740-8e77-440b-95f9-6a2968cd39fe-kube-api-access-fdnbt\") pod \"cloudkitty-lokistack-querier-5467947bf7-wwslq\" (UID: \"02db9740-8e77-440b-95f9-6a2968cd39fe\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.244817 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/02db9740-8e77-440b-95f9-6a2968cd39fe-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-5467947bf7-wwslq\" (UID: \"02db9740-8e77-440b-95f9-6a2968cd39fe\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.248270 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/02db9740-8e77-440b-95f9-6a2968cd39fe-config\") pod 
\"cloudkitty-lokistack-querier-5467947bf7-wwslq\" (UID: \"02db9740-8e77-440b-95f9-6a2968cd39fe\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.248277 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/02db9740-8e77-440b-95f9-6a2968cd39fe-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-5467947bf7-wwslq\" (UID: \"02db9740-8e77-440b-95f9-6a2968cd39fe\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.254134 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/02db9740-8e77-440b-95f9-6a2968cd39fe-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-5467947bf7-wwslq\" (UID: \"02db9740-8e77-440b-95f9-6a2968cd39fe\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.258883 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/02db9740-8e77-440b-95f9-6a2968cd39fe-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-5467947bf7-wwslq\" (UID: \"02db9740-8e77-440b-95f9-6a2968cd39fe\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.264437 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/02db9740-8e77-440b-95f9-6a2968cd39fe-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-5467947bf7-wwslq\" (UID: \"02db9740-8e77-440b-95f9-6a2968cd39fe\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.294867 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdnbt\" (UniqueName: \"kubernetes.io/projected/02db9740-8e77-440b-95f9-6a2968cd39fe-kube-api-access-fdnbt\") pod \"cloudkitty-lokistack-querier-5467947bf7-wwslq\" (UID: \"02db9740-8e77-440b-95f9-6a2968cd39fe\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.347991 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55a6559d-165f-4fb0-ac08-a0ba07d02cac-config\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn\" (UID: \"55a6559d-165f-4fb0-ac08-a0ba07d02cac\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.348110 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v876f\" (UniqueName: \"kubernetes.io/projected/55a6559d-165f-4fb0-ac08-a0ba07d02cac-kube-api-access-v876f\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn\" (UID: \"55a6559d-165f-4fb0-ac08-a0ba07d02cac\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.348210 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: 
\"kubernetes.io/secret/55a6559d-165f-4fb0-ac08-a0ba07d02cac-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn\" (UID: \"55a6559d-165f-4fb0-ac08-a0ba07d02cac\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.348323 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/55a6559d-165f-4fb0-ac08-a0ba07d02cac-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn\" (UID: \"55a6559d-165f-4fb0-ac08-a0ba07d02cac\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.348361 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/55a6559d-165f-4fb0-ac08-a0ba07d02cac-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn\" (UID: \"55a6559d-165f-4fb0-ac08-a0ba07d02cac\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.365578 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc"] Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.370220 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.380242 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.380522 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-gateway-ca-bundle" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.380809 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-client-http" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.381030 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-ca" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.381245 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-http" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.381616 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-gateway" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.417999 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc"] Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.439297 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6"] Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.441076 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.444938 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-dockercfg-bk688" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.449993 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.450387 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c72f3e47-f551-4d7e-8978-cf453bc9a80d-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.450504 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/c72f3e47-f551-4d7e-8978-cf453bc9a80d-tenants\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.450572 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/c72f3e47-f551-4d7e-8978-cf453bc9a80d-rbac\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.450619 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/55a6559d-165f-4fb0-ac08-a0ba07d02cac-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn\" (UID: \"55a6559d-165f-4fb0-ac08-a0ba07d02cac\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.450657 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwps5\" (UniqueName: \"kubernetes.io/projected/c72f3e47-f551-4d7e-8978-cf453bc9a80d-kube-api-access-qwps5\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.450689 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/55a6559d-165f-4fb0-ac08-a0ba07d02cac-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn\" (UID: \"55a6559d-165f-4fb0-ac08-a0ba07d02cac\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.450710 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/c72f3e47-f551-4d7e-8978-cf453bc9a80d-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: 
\"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.450780 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/c72f3e47-f551-4d7e-8978-cf453bc9a80d-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.450914 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c72f3e47-f551-4d7e-8978-cf453bc9a80d-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.451027 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55a6559d-165f-4fb0-ac08-a0ba07d02cac-config\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn\" (UID: \"55a6559d-165f-4fb0-ac08-a0ba07d02cac\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.451138 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v876f\" (UniqueName: \"kubernetes.io/projected/55a6559d-165f-4fb0-ac08-a0ba07d02cac-kube-api-access-v876f\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn\" (UID: \"55a6559d-165f-4fb0-ac08-a0ba07d02cac\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.451179 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/c72f3e47-f551-4d7e-8978-cf453bc9a80d-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.451247 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/55a6559d-165f-4fb0-ac08-a0ba07d02cac-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn\" (UID: \"55a6559d-165f-4fb0-ac08-a0ba07d02cac\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.451278 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c72f3e47-f551-4d7e-8978-cf453bc9a80d-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.452906 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/55a6559d-165f-4fb0-ac08-a0ba07d02cac-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn\" (UID: \"55a6559d-165f-4fb0-ac08-a0ba07d02cac\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.453624 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55a6559d-165f-4fb0-ac08-a0ba07d02cac-config\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn\" (UID: \"55a6559d-165f-4fb0-ac08-a0ba07d02cac\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.466393 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/55a6559d-165f-4fb0-ac08-a0ba07d02cac-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn\" (UID: \"55a6559d-165f-4fb0-ac08-a0ba07d02cac\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.471036 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/55a6559d-165f-4fb0-ac08-a0ba07d02cac-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn\" (UID: \"55a6559d-165f-4fb0-ac08-a0ba07d02cac\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.476839 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6"] Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.481826 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v876f\" (UniqueName: \"kubernetes.io/projected/55a6559d-165f-4fb0-ac08-a0ba07d02cac-kube-api-access-v876f\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn\" (UID: \"55a6559d-165f-4fb0-ac08-a0ba07d02cac\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.554344 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/2ffa4fa2-c466-47f5-bca6-613ec9e52779-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.554498 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c72f3e47-f551-4d7e-8978-cf453bc9a80d-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.554587 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ffa4fa2-c466-47f5-bca6-613ec9e52779-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " 
pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.554631 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/c72f3e47-f551-4d7e-8978-cf453bc9a80d-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.554672 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ffa4fa2-c466-47f5-bca6-613ec9e52779-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.554698 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c72f3e47-f551-4d7e-8978-cf453bc9a80d-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.554727 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c72f3e47-f551-4d7e-8978-cf453bc9a80d-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.554751 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ktr7\" (UniqueName: \"kubernetes.io/projected/2ffa4fa2-c466-47f5-bca6-613ec9e52779-kube-api-access-8ktr7\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.554784 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/c72f3e47-f551-4d7e-8978-cf453bc9a80d-tenants\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.554813 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ffa4fa2-c466-47f5-bca6-613ec9e52779-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.554835 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/c72f3e47-f551-4d7e-8978-cf453bc9a80d-rbac\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " 
pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.554858 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/c72f3e47-f551-4d7e-8978-cf453bc9a80d-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.554876 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwps5\" (UniqueName: \"kubernetes.io/projected/c72f3e47-f551-4d7e-8978-cf453bc9a80d-kube-api-access-qwps5\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.554901 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/2ffa4fa2-c466-47f5-bca6-613ec9e52779-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.554921 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/2ffa4fa2-c466-47f5-bca6-613ec9e52779-rbac\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.554972 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/c72f3e47-f551-4d7e-8978-cf453bc9a80d-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.555015 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/2ffa4fa2-c466-47f5-bca6-613ec9e52779-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.555055 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/2ffa4fa2-c466-47f5-bca6-613ec9e52779-tenants\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.556829 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c72f3e47-f551-4d7e-8978-cf453bc9a80d-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " 
pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.557330 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/c72f3e47-f551-4d7e-8978-cf453bc9a80d-rbac\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: E1204 15:22:15.557944 4946 secret.go:188] Couldn't get secret openstack/cloudkitty-lokistack-gateway-http: secret "cloudkitty-lokistack-gateway-http" not found Dec 04 15:22:15 crc kubenswrapper[4946]: E1204 15:22:15.558040 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c72f3e47-f551-4d7e-8978-cf453bc9a80d-tls-secret podName:c72f3e47-f551-4d7e-8978-cf453bc9a80d nodeName:}" failed. No retries permitted until 2025-12-04 15:22:16.058009133 +0000 UTC m=+1186.944052954 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-secret" (UniqueName: "kubernetes.io/secret/c72f3e47-f551-4d7e-8978-cf453bc9a80d-tls-secret") pod "cloudkitty-lokistack-gateway-bc75944f-c6jcc" (UID: "c72f3e47-f551-4d7e-8978-cf453bc9a80d") : secret "cloudkitty-lokistack-gateway-http" not found Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.558105 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c72f3e47-f551-4d7e-8978-cf453bc9a80d-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.559544 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c72f3e47-f551-4d7e-8978-cf453bc9a80d-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.560042 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/c72f3e47-f551-4d7e-8978-cf453bc9a80d-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.562458 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/c72f3e47-f551-4d7e-8978-cf453bc9a80d-tenants\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.563476 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/c72f3e47-f551-4d7e-8978-cf453bc9a80d-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.585414 
4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwps5\" (UniqueName: \"kubernetes.io/projected/c72f3e47-f551-4d7e-8978-cf453bc9a80d-kube-api-access-qwps5\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.656505 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ffa4fa2-c466-47f5-bca6-613ec9e52779-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.656612 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ffa4fa2-c466-47f5-bca6-613ec9e52779-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.656642 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ktr7\" (UniqueName: \"kubernetes.io/projected/2ffa4fa2-c466-47f5-bca6-613ec9e52779-kube-api-access-8ktr7\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.656686 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ffa4fa2-c466-47f5-bca6-613ec9e52779-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.656724 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/2ffa4fa2-c466-47f5-bca6-613ec9e52779-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.656752 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/2ffa4fa2-c466-47f5-bca6-613ec9e52779-rbac\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.656804 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/2ffa4fa2-c466-47f5-bca6-613ec9e52779-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.656828 4946 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/2ffa4fa2-c466-47f5-bca6-613ec9e52779-tenants\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.656854 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/2ffa4fa2-c466-47f5-bca6-613ec9e52779-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.658212 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/2ffa4fa2-c466-47f5-bca6-613ec9e52779-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.658285 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ffa4fa2-c466-47f5-bca6-613ec9e52779-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: E1204 15:22:15.658432 4946 secret.go:188] Couldn't get secret openstack/cloudkitty-lokistack-gateway-http: secret "cloudkitty-lokistack-gateway-http" not found Dec 04 15:22:15 crc kubenswrapper[4946]: E1204 15:22:15.658505 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2ffa4fa2-c466-47f5-bca6-613ec9e52779-tls-secret podName:2ffa4fa2-c466-47f5-bca6-613ec9e52779 nodeName:}" failed. No retries permitted until 2025-12-04 15:22:16.158477823 +0000 UTC m=+1187.044521464 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-secret" (UniqueName: "kubernetes.io/secret/2ffa4fa2-c466-47f5-bca6-613ec9e52779-tls-secret") pod "cloudkitty-lokistack-gateway-bc75944f-jtbm6" (UID: "2ffa4fa2-c466-47f5-bca6-613ec9e52779") : secret "cloudkitty-lokistack-gateway-http" not found Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.658598 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/2ffa4fa2-c466-47f5-bca6-613ec9e52779-rbac\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.658657 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ffa4fa2-c466-47f5-bca6-613ec9e52779-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.658899 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ffa4fa2-c466-47f5-bca6-613ec9e52779-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.662601 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/2ffa4fa2-c466-47f5-bca6-613ec9e52779-tenants\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.662737 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.663416 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/2ffa4fa2-c466-47f5-bca6-613ec9e52779-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.678279 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ktr7\" (UniqueName: \"kubernetes.io/projected/2ffa4fa2-c466-47f5-bca6-613ec9e52779-kube-api-access-8ktr7\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:15 crc kubenswrapper[4946]: I1204 15:22:15.998842 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.000670 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.005840 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-ingester-http" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.006091 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-ingester-grpc" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.026532 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.064436 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/decd9bb2-7749-48ff-b886-74e49bf5222d-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.064529 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/c72f3e47-f551-4d7e-8978-cf453bc9a80d-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.064768 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/decd9bb2-7749-48ff-b886-74e49bf5222d-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.065139 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/decd9bb2-7749-48ff-b886-74e49bf5222d-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.065215 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.065386 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/decd9bb2-7749-48ff-b886-74e49bf5222d-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.065716 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc 
kubenswrapper[4946]: I1204 15:22:16.065873 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/decd9bb2-7749-48ff-b886-74e49bf5222d-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.066075 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wcnd\" (UniqueName: \"kubernetes.io/projected/decd9bb2-7749-48ff-b886-74e49bf5222d-kube-api-access-9wcnd\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.069074 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/c72f3e47-f551-4d7e-8978-cf453bc9a80d-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-c6jcc\" (UID: \"c72f3e47-f551-4d7e-8978-cf453bc9a80d\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.145162 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.147177 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.155469 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-compactor-http" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.155712 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-compactor-grpc" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.167496 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.168546 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.168634 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/decd9bb2-7749-48ff-b886-74e49bf5222d-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.168700 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47583dfd-ecd6-41d8-ac98-748683cd0ae5-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.168740 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: 
\"kubernetes.io/secret/47583dfd-ecd6-41d8-ac98-748683cd0ae5-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.168792 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/2ffa4fa2-c466-47f5-bca6-613ec9e52779-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.168833 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/decd9bb2-7749-48ff-b886-74e49bf5222d-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.168862 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.168893 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/47583dfd-ecd6-41d8-ac98-748683cd0ae5-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.168917 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/decd9bb2-7749-48ff-b886-74e49bf5222d-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.168972 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nljfw\" (UniqueName: \"kubernetes.io/projected/47583dfd-ecd6-41d8-ac98-748683cd0ae5-kube-api-access-nljfw\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.168993 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.169016 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/47583dfd-ecd6-41d8-ac98-748683cd0ae5-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 
15:22:16.169051 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/decd9bb2-7749-48ff-b886-74e49bf5222d-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.169108 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wcnd\" (UniqueName: \"kubernetes.io/projected/decd9bb2-7749-48ff-b886-74e49bf5222d-kube-api-access-9wcnd\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.169159 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/47583dfd-ecd6-41d8-ac98-748683cd0ae5-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.169202 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/decd9bb2-7749-48ff-b886-74e49bf5222d-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.172291 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/decd9bb2-7749-48ff-b886-74e49bf5222d-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.172657 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.174777 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/decd9bb2-7749-48ff-b886-74e49bf5222d-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.175369 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.175416 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/decd9bb2-7749-48ff-b886-74e49bf5222d-cloudkitty-lokistack-ca-bundle\") pod 
\"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.175999 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/2ffa4fa2-c466-47f5-bca6-613ec9e52779-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-jtbm6\" (UID: \"2ffa4fa2-c466-47f5-bca6-613ec9e52779\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.179397 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/decd9bb2-7749-48ff-b886-74e49bf5222d-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.185454 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/decd9bb2-7749-48ff-b886-74e49bf5222d-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.196536 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wcnd\" (UniqueName: \"kubernetes.io/projected/decd9bb2-7749-48ff-b886-74e49bf5222d-kube-api-access-9wcnd\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.202373 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.225693 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"decd9bb2-7749-48ff-b886-74e49bf5222d\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.242143 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.245288 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.247939 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-index-gateway-http" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.257077 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-index-gateway-grpc" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.265837 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.270928 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47583dfd-ecd6-41d8-ac98-748683cd0ae5-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.271005 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/47583dfd-ecd6-41d8-ac98-748683cd0ae5-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.271041 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/47583dfd-ecd6-41d8-ac98-748683cd0ae5-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.271089 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nljfw\" (UniqueName: \"kubernetes.io/projected/47583dfd-ecd6-41d8-ac98-748683cd0ae5-kube-api-access-nljfw\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.271133 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/47583dfd-ecd6-41d8-ac98-748683cd0ae5-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.271188 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/47583dfd-ecd6-41d8-ac98-748683cd0ae5-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.271228 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.271431 4946 
operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.274834 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47583dfd-ecd6-41d8-ac98-748683cd0ae5-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.275609 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/47583dfd-ecd6-41d8-ac98-748683cd0ae5-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.276808 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/47583dfd-ecd6-41d8-ac98-748683cd0ae5-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.278313 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/47583dfd-ecd6-41d8-ac98-748683cd0ae5-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.280730 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/47583dfd-ecd6-41d8-ac98-748683cd0ae5-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.293607 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.293785 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nljfw\" (UniqueName: \"kubernetes.io/projected/47583dfd-ecd6-41d8-ac98-748683cd0ae5-kube-api-access-nljfw\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"47583dfd-ecd6-41d8-ac98-748683cd0ae5\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.326488 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.332151 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.373755 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwzd9\" (UniqueName: \"kubernetes.io/projected/b21846fc-0f45-4cae-aea6-b4e3f33ec03a-kube-api-access-lwzd9\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.373825 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.373855 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/b21846fc-0f45-4cae-aea6-b4e3f33ec03a-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.373915 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b21846fc-0f45-4cae-aea6-b4e3f33ec03a-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.373947 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/b21846fc-0f45-4cae-aea6-b4e3f33ec03a-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.373983 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b21846fc-0f45-4cae-aea6-b4e3f33ec03a-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.374016 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/b21846fc-0f45-4cae-aea6-b4e3f33ec03a-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.428785 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.476383 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.476442 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/b21846fc-0f45-4cae-aea6-b4e3f33ec03a-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.476499 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b21846fc-0f45-4cae-aea6-b4e3f33ec03a-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.476620 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.477259 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/b21846fc-0f45-4cae-aea6-b4e3f33ec03a-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.477331 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b21846fc-0f45-4cae-aea6-b4e3f33ec03a-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.477385 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/b21846fc-0f45-4cae-aea6-b4e3f33ec03a-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.477528 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwzd9\" (UniqueName: \"kubernetes.io/projected/b21846fc-0f45-4cae-aea6-b4e3f33ec03a-kube-api-access-lwzd9\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.478093 4946 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b21846fc-0f45-4cae-aea6-b4e3f33ec03a-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.480291 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b21846fc-0f45-4cae-aea6-b4e3f33ec03a-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.481464 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/b21846fc-0f45-4cae-aea6-b4e3f33ec03a-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.482516 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/b21846fc-0f45-4cae-aea6-b4e3f33ec03a-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.496563 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwzd9\" (UniqueName: \"kubernetes.io/projected/b21846fc-0f45-4cae-aea6-b4e3f33ec03a-kube-api-access-lwzd9\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.498387 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/b21846fc-0f45-4cae-aea6-b4e3f33ec03a-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.515867 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"b21846fc-0f45-4cae-aea6-b4e3f33ec03a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.519995 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:16 crc kubenswrapper[4946]: I1204 15:22:16.653585 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:17 crc kubenswrapper[4946]: E1204 15:22:17.705355 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 04 15:22:17 crc kubenswrapper[4946]: E1204 15:22:17.705937 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mt4pm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-c5fz8_openstack(31b254b2-a6bc-4ee5-aa20-7dc8f577b114): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 15:22:17 crc kubenswrapper[4946]: E1204 15:22:17.707322 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-c5fz8" podUID="31b254b2-a6bc-4ee5-aa20-7dc8f577b114" Dec 04 15:22:17 crc kubenswrapper[4946]: E1204 15:22:17.764106 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 04 15:22:17 crc kubenswrapper[4946]: E1204 15:22:17.764673 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d 
--hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k2xcf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-h9fbt_openstack(3c111666-b29d-4cfa-988b-f216dae8a8b0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 15:22:17 crc kubenswrapper[4946]: E1204 15:22:17.765842 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-h9fbt" podUID="3c111666-b29d-4cfa-988b-f216dae8a8b0" Dec 04 15:22:17 crc kubenswrapper[4946]: I1204 15:22:17.867256 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"a304ef91-9673-43d6-8b91-0ba511961217","Type":"ContainerStarted","Data":"009e357eba2d48cc30b8b010814d4bc70ee97cf7accd83d8bcbb92461c0291c7"} Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.162730 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 04 15:22:18 crc kubenswrapper[4946]: W1204 15:22:18.242171 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4481828_f464_47c6_a803_0c1962101efa.slice/crio-9ec3f7c4eb651702a7d79bd330dea930543197ac1b7fa44a3917e6e050c70b00 WatchSource:0}: Error finding container 9ec3f7c4eb651702a7d79bd330dea930543197ac1b7fa44a3917e6e050c70b00: Status 404 returned error can't find the container with id 9ec3f7c4eb651702a7d79bd330dea930543197ac1b7fa44a3917e6e050c70b00 Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.320836 4946 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.445423 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-c5fz8" Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.547736 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mt4pm\" (UniqueName: \"kubernetes.io/projected/31b254b2-a6bc-4ee5-aa20-7dc8f577b114-kube-api-access-mt4pm\") pod \"31b254b2-a6bc-4ee5-aa20-7dc8f577b114\" (UID: \"31b254b2-a6bc-4ee5-aa20-7dc8f577b114\") " Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.548331 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31b254b2-a6bc-4ee5-aa20-7dc8f577b114-config\") pod \"31b254b2-a6bc-4ee5-aa20-7dc8f577b114\" (UID: \"31b254b2-a6bc-4ee5-aa20-7dc8f577b114\") " Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.549046 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31b254b2-a6bc-4ee5-aa20-7dc8f577b114-config" (OuterVolumeSpecName: "config") pod "31b254b2-a6bc-4ee5-aa20-7dc8f577b114" (UID: "31b254b2-a6bc-4ee5-aa20-7dc8f577b114"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.549365 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31b254b2-a6bc-4ee5-aa20-7dc8f577b114-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.562988 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.564861 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31b254b2-a6bc-4ee5-aa20-7dc8f577b114-kube-api-access-mt4pm" (OuterVolumeSpecName: "kube-api-access-mt4pm") pod "31b254b2-a6bc-4ee5-aa20-7dc8f577b114" (UID: "31b254b2-a6bc-4ee5-aa20-7dc8f577b114"). InnerVolumeSpecName "kube-api-access-mt4pm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:22:18 crc kubenswrapper[4946]: W1204 15:22:18.576978 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43d26c42_eba9_4e5c_bd2d_7cdf7074a176.slice/crio-54710067d503f0767535c3402566a371fe3a8880fb8308b806d7df91f83dbb04 WatchSource:0}: Error finding container 54710067d503f0767535c3402566a371fe3a8880fb8308b806d7df91f83dbb04: Status 404 returned error can't find the container with id 54710067d503f0767535c3402566a371fe3a8880fb8308b806d7df91f83dbb04 Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.651255 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mt4pm\" (UniqueName: \"kubernetes.io/projected/31b254b2-a6bc-4ee5-aa20-7dc8f577b114-kube-api-access-mt4pm\") on node \"crc\" DevicePath \"\"" Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.879752 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 04 15:22:18 crc kubenswrapper[4946]: W1204 15:22:18.884680 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6906a68_0819_41bc_a3d8_2ac76e77b67f.slice/crio-1303c9e6e816ce9c4417127eec8acd68ecc41a23e81527d6bcc3a412d6b1af71 WatchSource:0}: Error finding container 1303c9e6e816ce9c4417127eec8acd68ecc41a23e81527d6bcc3a412d6b1af71: Status 404 returned error can't find the container with id 1303c9e6e816ce9c4417127eec8acd68ecc41a23e81527d6bcc3a412d6b1af71 Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.910963 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-c5fz8" Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.911331 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-c5fz8" event={"ID":"31b254b2-a6bc-4ee5-aa20-7dc8f577b114","Type":"ContainerDied","Data":"66431a434ba417b8721060101c49ae976f3d937959e258c073dfc35b179fca3b"} Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.931445 4946 generic.go:334] "Generic (PLEG): container finished" podID="de110676-babd-433c-b63e-cd66dc1bd512" containerID="e355084faa8c8085d2d00d13a691fc8aa13716532004ba82dfaa198151393107" exitCode=0 Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.931611 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" event={"ID":"de110676-babd-433c-b63e-cd66dc1bd512","Type":"ContainerDied","Data":"e355084faa8c8085d2d00d13a691fc8aa13716532004ba82dfaa198151393107"} Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.953277 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-h9fbt" event={"ID":"3c111666-b29d-4cfa-988b-f216dae8a8b0","Type":"ContainerDied","Data":"5833bcfb9098f1bdb8a30eb44681edf17e4496bb512acfe916aee93be5b472a0"} Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.953342 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5833bcfb9098f1bdb8a30eb44681edf17e4496bb512acfe916aee93be5b472a0" Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.953726 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-h9fbt" Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.976018 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"44a85e36-b029-4450-b8aa-11bf910d8139","Type":"ContainerStarted","Data":"43b0652114547167534401db38cd2750aeeb2ff838f6b816b106e8575b72156a"} Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.981257 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"43d26c42-eba9-4e5c-bd2d-7cdf7074a176","Type":"ContainerStarted","Data":"54710067d503f0767535c3402566a371fe3a8880fb8308b806d7df91f83dbb04"} Dec 04 15:22:18 crc kubenswrapper[4946]: I1204 15:22:18.983964 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f4481828-f464-47c6-a803-0c1962101efa","Type":"ContainerStarted","Data":"9ec3f7c4eb651702a7d79bd330dea930543197ac1b7fa44a3917e6e050c70b00"} Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.055998 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-c5fz8"] Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.065279 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-c5fz8"] Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.078887 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c111666-b29d-4cfa-988b-f216dae8a8b0-dns-svc\") pod \"3c111666-b29d-4cfa-988b-f216dae8a8b0\" (UID: \"3c111666-b29d-4cfa-988b-f216dae8a8b0\") " Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.079062 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2xcf\" (UniqueName: \"kubernetes.io/projected/3c111666-b29d-4cfa-988b-f216dae8a8b0-kube-api-access-k2xcf\") pod \"3c111666-b29d-4cfa-988b-f216dae8a8b0\" (UID: \"3c111666-b29d-4cfa-988b-f216dae8a8b0\") " Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.079091 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c111666-b29d-4cfa-988b-f216dae8a8b0-config\") pod \"3c111666-b29d-4cfa-988b-f216dae8a8b0\" (UID: \"3c111666-b29d-4cfa-988b-f216dae8a8b0\") " Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.081319 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c111666-b29d-4cfa-988b-f216dae8a8b0-config" (OuterVolumeSpecName: "config") pod "3c111666-b29d-4cfa-988b-f216dae8a8b0" (UID: "3c111666-b29d-4cfa-988b-f216dae8a8b0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.081753 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c111666-b29d-4cfa-988b-f216dae8a8b0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3c111666-b29d-4cfa-988b-f216dae8a8b0" (UID: "3c111666-b29d-4cfa-988b-f216dae8a8b0"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.090050 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c111666-b29d-4cfa-988b-f216dae8a8b0-kube-api-access-k2xcf" (OuterVolumeSpecName: "kube-api-access-k2xcf") pod "3c111666-b29d-4cfa-988b-f216dae8a8b0" (UID: "3c111666-b29d-4cfa-988b-f216dae8a8b0"). InnerVolumeSpecName "kube-api-access-k2xcf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.094018 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.112612 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-qv4hw"] Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.181977 4946 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c111666-b29d-4cfa-988b-f216dae8a8b0-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.182033 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2xcf\" (UniqueName: \"kubernetes.io/projected/3c111666-b29d-4cfa-988b-f216dae8a8b0-kube-api-access-k2xcf\") on node \"crc\" DevicePath \"\"" Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.182049 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c111666-b29d-4cfa-988b-f216dae8a8b0-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.230888 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-hc6tt"] Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.269780 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq"] Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.280078 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 04 15:22:19 crc kubenswrapper[4946]: W1204 15:22:19.380648 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c369924_f384_4ca1_b3ac_e1b334790f15.slice/crio-4a6cb2f2cf3e4c0caf06c3915eed00d380a056a6d974c5ecd9274562c0913e16 WatchSource:0}: Error finding container 4a6cb2f2cf3e4c0caf06c3915eed00d380a056a6d974c5ecd9274562c0913e16: Status 404 returned error can't find the container with id 4a6cb2f2cf3e4c0caf06c3915eed00d380a056a6d974c5ecd9274562c0913e16 Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.383398 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.547529 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31b254b2-a6bc-4ee5-aa20-7dc8f577b114" path="/var/lib/kubelet/pods/31b254b2-a6bc-4ee5-aa20-7dc8f577b114/volumes" Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.557182 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc"] Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.584375 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6"] Dec 04 15:22:19 crc kubenswrapper[4946]: W1204 15:22:19.590784 4946 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf89f1623_6a48_4db4_8059_940887046c8e.slice/crio-d06ca7470945260ef349110fee81499164906dd158af0eb54e7e7d0c877ccb23 WatchSource:0}: Error finding container d06ca7470945260ef349110fee81499164906dd158af0eb54e7e7d0c877ccb23: Status 404 returned error can't find the container with id d06ca7470945260ef349110fee81499164906dd158af0eb54e7e7d0c877ccb23 Dec 04 15:22:19 crc kubenswrapper[4946]: E1204 15:22:19.656035 4946 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Dec 04 15:22:19 crc kubenswrapper[4946]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/de110676-babd-433c-b63e-cd66dc1bd512/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 04 15:22:19 crc kubenswrapper[4946]: > podSandboxID="b96ed6ca9c36e1b9c853a9027cb739cdd81e548a30e7dde1bfc48d692bc92965" Dec 04 15:22:19 crc kubenswrapper[4946]: E1204 15:22:19.656248 4946 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 04 15:22:19 crc kubenswrapper[4946]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-82sp6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 
},Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-jbxrv_openstack(de110676-babd-433c-b63e-cd66dc1bd512): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/de110676-babd-433c-b63e-cd66dc1bd512/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 04 15:22:19 crc kubenswrapper[4946]: > logger="UnhandledError" Dec 04 15:22:19 crc kubenswrapper[4946]: E1204 15:22:19.658104 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/de110676-babd-433c-b63e-cd66dc1bd512/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" podUID="de110676-babd-433c-b63e-cd66dc1bd512" Dec 04 15:22:19 crc kubenswrapper[4946]: W1204 15:22:19.666675 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ffa4fa2_c466_47f5_bca6_613ec9e52779.slice/crio-9f393d6c7405d6b6f04e7d4967d178c40ca400e81acd163041e812338205cffe WatchSource:0}: Error finding container 9f393d6c7405d6b6f04e7d4967d178c40ca400e81acd163041e812338205cffe: Status 404 returned error can't find the container with id 9f393d6c7405d6b6f04e7d4967d178c40ca400e81acd163041e812338205cffe Dec 04 15:22:19 crc kubenswrapper[4946]: W1204 15:22:19.667291 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc72f3e47_f551_4d7e_8978_cf453bc9a80d.slice/crio-1a4173c0c8feae370c757442d27438c021e952f16723e0ac39fe93b9378357e5 WatchSource:0}: Error finding container 1a4173c0c8feae370c757442d27438c021e952f16723e0ac39fe93b9378357e5: Status 404 returned error can't find the container with id 1a4173c0c8feae370c757442d27438c021e952f16723e0ac39fe93b9378357e5 Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.694038 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c"] Dec 04 15:22:19 crc kubenswrapper[4946]: W1204 15:22:19.755561 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddecd9bb2_7749_48ff_b886_74e49bf5222d.slice/crio-130fde33b2e00d795ec338c2662204303cfa678ab262e20472f0363a2c7333c4 WatchSource:0}: Error finding container 130fde33b2e00d795ec338c2662204303cfa678ab262e20472f0363a2c7333c4: Status 404 returned error can't find the container with id 130fde33b2e00d795ec338c2662204303cfa678ab262e20472f0363a2c7333c4 Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.781235 4946 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.796394 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.840002 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Dec 04 15:22:19 crc kubenswrapper[4946]: I1204 15:22:19.854790 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn"] Dec 04 15:22:20 crc kubenswrapper[4946]: I1204 15:22:20.005445 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"c6906a68-0819-41bc-a3d8-2ac76e77b67f","Type":"ContainerStarted","Data":"1303c9e6e816ce9c4417127eec8acd68ecc41a23e81527d6bcc3a412d6b1af71"} Dec 04 15:22:20 crc kubenswrapper[4946]: I1204 15:22:20.007503 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" event={"ID":"b0adb62a-e125-4612-8e57-74bab154a2c4","Type":"ContainerStarted","Data":"f44dc671e47f731f3ac2233fd8973dd16428fe9447db2f67b6b4b5966b1b2029"} Dec 04 15:22:20 crc kubenswrapper[4946]: I1204 15:22:20.008932 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" event={"ID":"c72f3e47-f551-4d7e-8978-cf453bc9a80d","Type":"ContainerStarted","Data":"1a4173c0c8feae370c757442d27438c021e952f16723e0ac39fe93b9378357e5"} Dec 04 15:22:20 crc kubenswrapper[4946]: I1204 15:22:20.012256 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"74261534-d493-4bb6-ac4f-e7196daaa71f","Type":"ContainerStarted","Data":"6cefd12de1797d14a137801afe2990a03e705fa3422b0e34e1def2f1c48d0e37"} Dec 04 15:22:20 crc kubenswrapper[4946]: I1204 15:22:20.016639 4946 generic.go:334] "Generic (PLEG): container finished" podID="bc305935-18fd-43f0-b1de-b588b49ea299" containerID="642b11eea5294f96a8b756fe01aa50551e6e58ed71e4a96c3cca3ba0dd71f5f0" exitCode=0 Dec 04 15:22:20 crc kubenswrapper[4946]: I1204 15:22:20.016753 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" event={"ID":"bc305935-18fd-43f0-b1de-b588b49ea299","Type":"ContainerDied","Data":"642b11eea5294f96a8b756fe01aa50551e6e58ed71e4a96c3cca3ba0dd71f5f0"} Dec 04 15:22:20 crc kubenswrapper[4946]: I1204 15:22:20.018777 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-index-gateway-0" event={"ID":"b21846fc-0f45-4cae-aea6-b4e3f33ec03a","Type":"ContainerStarted","Data":"3904cbb675eae981966abdb1e8a220a9fa97860846ad4b50186c3f06bf6f2c54"} Dec 04 15:22:20 crc kubenswrapper[4946]: I1204 15:22:20.021538 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-ingester-0" event={"ID":"decd9bb2-7749-48ff-b886-74e49bf5222d","Type":"ContainerStarted","Data":"130fde33b2e00d795ec338c2662204303cfa678ab262e20472f0363a2c7333c4"} Dec 04 15:22:20 crc kubenswrapper[4946]: I1204 15:22:20.027800 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"f89f1623-6a48-4db4-8059-940887046c8e","Type":"ContainerStarted","Data":"d06ca7470945260ef349110fee81499164906dd158af0eb54e7e7d0c877ccb23"} Dec 04 15:22:20 crc kubenswrapper[4946]: I1204 15:22:20.031468 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-hc6tt" 
event={"ID":"9c369924-f384-4ca1-b3ac-e1b334790f15","Type":"ContainerStarted","Data":"4a6cb2f2cf3e4c0caf06c3915eed00d380a056a6d974c5ecd9274562c0913e16"} Dec 04 15:22:20 crc kubenswrapper[4946]: I1204 15:22:20.037973 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" event={"ID":"2ffa4fa2-c466-47f5-bca6-613ec9e52779","Type":"ContainerStarted","Data":"9f393d6c7405d6b6f04e7d4967d178c40ca400e81acd163041e812338205cffe"} Dec 04 15:22:20 crc kubenswrapper[4946]: I1204 15:22:20.046952 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f81b9295-1cdf-44a6-afef-1380c1e3cf54","Type":"ContainerStarted","Data":"f6674fb05bee621d31a43aba76b5c5ab5e463d48671c9751f4891845957f138c"} Dec 04 15:22:20 crc kubenswrapper[4946]: I1204 15:22:20.049037 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qv4hw" event={"ID":"2734e466-178a-4344-bfac-9adb5e4492a7","Type":"ContainerStarted","Data":"9bd7e782e074f49a8fb9488193bf1daafeda6b7e880dd447aa4fce3035e6fd76"} Dec 04 15:22:20 crc kubenswrapper[4946]: I1204 15:22:20.052253 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" event={"ID":"02db9740-8e77-440b-95f9-6a2968cd39fe","Type":"ContainerStarted","Data":"eb15bcb874f1d3d848da7e0b3704f2a068a780f8b43a6267899321baba5ddfb7"} Dec 04 15:22:20 crc kubenswrapper[4946]: I1204 15:22:20.052411 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-h9fbt" Dec 04 15:22:20 crc kubenswrapper[4946]: W1204 15:22:20.166083 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55a6559d_165f_4fb0_ac08_a0ba07d02cac.slice/crio-43c2e6a3076f137707d9307270d2ce899c7d5747c4b14609c0ad6aeab9e9d0ba WatchSource:0}: Error finding container 43c2e6a3076f137707d9307270d2ce899c7d5747c4b14609c0ad6aeab9e9d0ba: Status 404 returned error can't find the container with id 43c2e6a3076f137707d9307270d2ce899c7d5747c4b14609c0ad6aeab9e9d0ba Dec 04 15:22:20 crc kubenswrapper[4946]: I1204 15:22:20.276913 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-h9fbt"] Dec 04 15:22:20 crc kubenswrapper[4946]: I1204 15:22:20.284846 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-h9fbt"] Dec 04 15:22:21 crc kubenswrapper[4946]: I1204 15:22:21.063397 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-compactor-0" event={"ID":"47583dfd-ecd6-41d8-ac98-748683cd0ae5","Type":"ContainerStarted","Data":"19f68b99b48c0b062d118f16d39d2effef0af6f38bb4fdd5e9e438e9a633b938"} Dec 04 15:22:21 crc kubenswrapper[4946]: I1204 15:22:21.065449 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" event={"ID":"55a6559d-165f-4fb0-ac08-a0ba07d02cac","Type":"ContainerStarted","Data":"43c2e6a3076f137707d9307270d2ce899c7d5747c4b14609c0ad6aeab9e9d0ba"} Dec 04 15:22:21 crc kubenswrapper[4946]: I1204 15:22:21.072519 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"920eb4d8-3aa1-4141-9f65-647e275405e4","Type":"ContainerStarted","Data":"743ddc87efc06da47d02b8cad5e61ba2b5c71fb18761848f5cb719610cc67896"} Dec 04 15:22:21 crc kubenswrapper[4946]: I1204 15:22:21.076609 4946 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f65583d1-046b-463a-9101-2074072a94f0","Type":"ContainerStarted","Data":"4af82c0e7141a08c616b14294ff018b51e2011189f2e35b47b77c81800165efa"} Dec 04 15:22:21 crc kubenswrapper[4946]: I1204 15:22:21.467874 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c111666-b29d-4cfa-988b-f216dae8a8b0" path="/var/lib/kubelet/pods/3c111666-b29d-4cfa-988b-f216dae8a8b0/volumes" Dec 04 15:22:22 crc kubenswrapper[4946]: I1204 15:22:22.478578 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:22:22 crc kubenswrapper[4946]: I1204 15:22:22.479166 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.493207 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-nx7vc"] Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.496159 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.499536 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.523097 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-nx7vc"] Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.603024 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a94df46-46e0-4178-804b-1582e9cf7738-combined-ca-bundle\") pod \"ovn-controller-metrics-nx7vc\" (UID: \"3a94df46-46e0-4178-804b-1582e9cf7738\") " pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.603108 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcxhl\" (UniqueName: \"kubernetes.io/projected/3a94df46-46e0-4178-804b-1582e9cf7738-kube-api-access-bcxhl\") pod \"ovn-controller-metrics-nx7vc\" (UID: \"3a94df46-46e0-4178-804b-1582e9cf7738\") " pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.603156 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a94df46-46e0-4178-804b-1582e9cf7738-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-nx7vc\" (UID: \"3a94df46-46e0-4178-804b-1582e9cf7738\") " pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.603225 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/3a94df46-46e0-4178-804b-1582e9cf7738-ovn-rundir\") pod \"ovn-controller-metrics-nx7vc\" (UID: \"3a94df46-46e0-4178-804b-1582e9cf7738\") " pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 
15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.603286 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a94df46-46e0-4178-804b-1582e9cf7738-config\") pod \"ovn-controller-metrics-nx7vc\" (UID: \"3a94df46-46e0-4178-804b-1582e9cf7738\") " pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.603304 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/3a94df46-46e0-4178-804b-1582e9cf7738-ovs-rundir\") pod \"ovn-controller-metrics-nx7vc\" (UID: \"3a94df46-46e0-4178-804b-1582e9cf7738\") " pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.650651 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-tg6rs"] Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.684397 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-qmhjd"] Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.687171 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.693903 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.705861 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a94df46-46e0-4178-804b-1582e9cf7738-config\") pod \"ovn-controller-metrics-nx7vc\" (UID: \"3a94df46-46e0-4178-804b-1582e9cf7738\") " pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.705912 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/3a94df46-46e0-4178-804b-1582e9cf7738-ovs-rundir\") pod \"ovn-controller-metrics-nx7vc\" (UID: \"3a94df46-46e0-4178-804b-1582e9cf7738\") " pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.705969 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a94df46-46e0-4178-804b-1582e9cf7738-combined-ca-bundle\") pod \"ovn-controller-metrics-nx7vc\" (UID: \"3a94df46-46e0-4178-804b-1582e9cf7738\") " pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.706029 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcxhl\" (UniqueName: \"kubernetes.io/projected/3a94df46-46e0-4178-804b-1582e9cf7738-kube-api-access-bcxhl\") pod \"ovn-controller-metrics-nx7vc\" (UID: \"3a94df46-46e0-4178-804b-1582e9cf7738\") " pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.706050 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a94df46-46e0-4178-804b-1582e9cf7738-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-nx7vc\" (UID: \"3a94df46-46e0-4178-804b-1582e9cf7738\") " pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.706143 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/3a94df46-46e0-4178-804b-1582e9cf7738-ovn-rundir\") pod \"ovn-controller-metrics-nx7vc\" (UID: \"3a94df46-46e0-4178-804b-1582e9cf7738\") " pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.706579 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/3a94df46-46e0-4178-804b-1582e9cf7738-ovn-rundir\") pod \"ovn-controller-metrics-nx7vc\" (UID: \"3a94df46-46e0-4178-804b-1582e9cf7738\") " pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.709828 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/3a94df46-46e0-4178-804b-1582e9cf7738-ovs-rundir\") pod \"ovn-controller-metrics-nx7vc\" (UID: \"3a94df46-46e0-4178-804b-1582e9cf7738\") " pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.714078 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-qmhjd"] Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.720499 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a94df46-46e0-4178-804b-1582e9cf7738-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-nx7vc\" (UID: \"3a94df46-46e0-4178-804b-1582e9cf7738\") " pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.724540 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a94df46-46e0-4178-804b-1582e9cf7738-config\") pod \"ovn-controller-metrics-nx7vc\" (UID: \"3a94df46-46e0-4178-804b-1582e9cf7738\") " pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.780074 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a94df46-46e0-4178-804b-1582e9cf7738-combined-ca-bundle\") pod \"ovn-controller-metrics-nx7vc\" (UID: \"3a94df46-46e0-4178-804b-1582e9cf7738\") " pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.785078 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcxhl\" (UniqueName: \"kubernetes.io/projected/3a94df46-46e0-4178-804b-1582e9cf7738-kube-api-access-bcxhl\") pod \"ovn-controller-metrics-nx7vc\" (UID: \"3a94df46-46e0-4178-804b-1582e9cf7738\") " pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.808658 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlcpb\" (UniqueName: \"kubernetes.io/projected/a2b934a8-2f23-4bcc-93bf-54adbdc19769-kube-api-access-rlcpb\") pod \"dnsmasq-dns-7fd796d7df-qmhjd\" (UID: \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\") " pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.808758 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2b934a8-2f23-4bcc-93bf-54adbdc19769-config\") pod \"dnsmasq-dns-7fd796d7df-qmhjd\" (UID: \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\") " pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 
15:22:30.808828 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a2b934a8-2f23-4bcc-93bf-54adbdc19769-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-qmhjd\" (UID: \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\") " pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.808920 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2b934a8-2f23-4bcc-93bf-54adbdc19769-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-qmhjd\" (UID: \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\") " pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.832806 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-nx7vc" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.913515 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2b934a8-2f23-4bcc-93bf-54adbdc19769-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-qmhjd\" (UID: \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\") " pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.913978 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlcpb\" (UniqueName: \"kubernetes.io/projected/a2b934a8-2f23-4bcc-93bf-54adbdc19769-kube-api-access-rlcpb\") pod \"dnsmasq-dns-7fd796d7df-qmhjd\" (UID: \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\") " pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.914785 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2b934a8-2f23-4bcc-93bf-54adbdc19769-config\") pod \"dnsmasq-dns-7fd796d7df-qmhjd\" (UID: \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\") " pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.915041 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a2b934a8-2f23-4bcc-93bf-54adbdc19769-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-qmhjd\" (UID: \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\") " pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.914827 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2b934a8-2f23-4bcc-93bf-54adbdc19769-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-qmhjd\" (UID: \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\") " pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.915590 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2b934a8-2f23-4bcc-93bf-54adbdc19769-config\") pod \"dnsmasq-dns-7fd796d7df-qmhjd\" (UID: \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\") " pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.916030 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a2b934a8-2f23-4bcc-93bf-54adbdc19769-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-qmhjd\" (UID: \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\") " 
pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.938606 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlcpb\" (UniqueName: \"kubernetes.io/projected/a2b934a8-2f23-4bcc-93bf-54adbdc19769-kube-api-access-rlcpb\") pod \"dnsmasq-dns-7fd796d7df-qmhjd\" (UID: \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\") " pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" Dec 04 15:22:30 crc kubenswrapper[4946]: I1204 15:22:30.962137 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-jbxrv"] Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.005269 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-nq7lb"] Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.007359 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.016724 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.022763 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-nq7lb"] Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.047111 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.121716 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lndnk\" (UniqueName: \"kubernetes.io/projected/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-kube-api-access-lndnk\") pod \"dnsmasq-dns-86db49b7ff-nq7lb\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") " pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.121890 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-nq7lb\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") " pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.121982 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-config\") pod \"dnsmasq-dns-86db49b7ff-nq7lb\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") " pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.122022 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-nq7lb\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") " pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.122066 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-nq7lb\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") " pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.225034 4946 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-nq7lb\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") " pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.225189 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-config\") pod \"dnsmasq-dns-86db49b7ff-nq7lb\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") " pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.225253 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-nq7lb\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") " pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.225311 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-nq7lb\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") " pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.225416 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lndnk\" (UniqueName: \"kubernetes.io/projected/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-kube-api-access-lndnk\") pod \"dnsmasq-dns-86db49b7ff-nq7lb\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") " pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.227027 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-nq7lb\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") " pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.230057 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-nq7lb\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") " pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.230538 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-config\") pod \"dnsmasq-dns-86db49b7ff-nq7lb\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") " pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.231052 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-nq7lb\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") " pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.247632 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lndnk\" (UniqueName: 
\"kubernetes.io/projected/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-kube-api-access-lndnk\") pod \"dnsmasq-dns-86db49b7ff-nq7lb\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") " pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:31 crc kubenswrapper[4946]: I1204 15:22:31.351060 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:33 crc kubenswrapper[4946]: E1204 15:22:33.289723 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 04 15:22:33 crc kubenswrapper[4946]: E1204 15:22:33.290431 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bk5pp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(43d26c42-eba9-4e5c-bd2d-7cdf7074a176): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 15:22:33 crc kubenswrapper[4946]: E1204 15:22:33.291706 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="43d26c42-eba9-4e5c-bd2d-7cdf7074a176" Dec 04 15:22:33 crc kubenswrapper[4946]: E1204 15:22:33.313017 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: 
context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 04 15:22:33 crc kubenswrapper[4946]: E1204 15:22:33.313993 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jj6pl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(44a85e36-b029-4450-b8aa-11bf910d8139): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 15:22:33 crc kubenswrapper[4946]: E1204 15:22:33.315493 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="44a85e36-b029-4450-b8aa-11bf910d8139" Dec 04 15:22:33 crc kubenswrapper[4946]: I1204 15:22:33.328775 4946 scope.go:117] "RemoveContainer" containerID="bbe000437c84fe84557ce6862eb115ed856dcc6822c38fc66a907160d33c2dd0" Dec 04 15:22:33 crc kubenswrapper[4946]: E1204 15:22:33.540012 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified" Dec 04 15:22:33 crc kubenswrapper[4946]: E1204 15:22:33.540237 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:ovsdbserver-sb,Image:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,Command:[/usr/bin/dumb-init],Args:[/usr/local/bin/container-scripts/setup.sh],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n57bh689h5c4hddh58bh76h546h564h9bh7h57ch5cfh59fhc6h7bh5b8h654hd4h579h57dh65dh647h64h8bh545h64h5cfh55dh67h9ch64bh559q,ValueFrom:nil,},EnvVar{Name:OVN_LOGDIR,Value:/tmp,ValueFrom:nil,},EnvVar{Name:OVN_RUNDIR,Value:/tmp,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovndbcluster-sb-etc-ovn,ReadOnly:false,MountPath:/etc/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdb-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rngw6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/cleanup.sh],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:20,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
ovsdbserver-sb-0_openstack(f89f1623-6a48-4db4-8059-940887046c8e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 15:22:33 crc kubenswrapper[4946]: I1204 15:22:33.781756 4946 scope.go:117] "RemoveContainer" containerID="7ee83a67e650a78796aa29579bde5dd9bea407bf85ba93f88fea2743d84af184" Dec 04 15:22:34 crc kubenswrapper[4946]: I1204 15:22:34.073433 4946 scope.go:117] "RemoveContainer" containerID="00c50aaeda450cb2cd05ea9ad76af73ec767118ac3cca43e2f2e361088115da8" Dec 04 15:22:34 crc kubenswrapper[4946]: I1204 15:22:34.242397 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" podUID="bc305935-18fd-43f0-b1de-b588b49ea299" containerName="dnsmasq-dns" containerID="cri-o://2dc6e4c7bb421bce35cf4b39ec7a020b3055a60fe92ce33d202140b49b1c7cf1" gracePeriod=10 Dec 04 15:22:34 crc kubenswrapper[4946]: I1204 15:22:34.242547 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" event={"ID":"bc305935-18fd-43f0-b1de-b588b49ea299","Type":"ContainerStarted","Data":"2dc6e4c7bb421bce35cf4b39ec7a020b3055a60fe92ce33d202140b49b1c7cf1"} Dec 04 15:22:34 crc kubenswrapper[4946]: I1204 15:22:34.243401 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" Dec 04 15:22:34 crc kubenswrapper[4946]: E1204 15:22:34.257590 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="44a85e36-b029-4450-b8aa-11bf910d8139" Dec 04 15:22:34 crc kubenswrapper[4946]: E1204 15:22:34.257799 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-galera-0" podUID="43d26c42-eba9-4e5c-bd2d-7cdf7074a176" Dec 04 15:22:34 crc kubenswrapper[4946]: I1204 15:22:34.278279 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" podStartSLOduration=17.784062651 podStartE2EDuration="39.278247789s" podCreationTimestamp="2025-12-04 15:21:55 +0000 UTC" firstStartedPulling="2025-12-04 15:21:56.836753722 +0000 UTC m=+1167.722797363" lastFinishedPulling="2025-12-04 15:22:18.33093885 +0000 UTC m=+1189.216982501" observedRunningTime="2025-12-04 15:22:34.272282346 +0000 UTC m=+1205.158326017" watchObservedRunningTime="2025-12-04 15:22:34.278247789 +0000 UTC m=+1205.164291430" Dec 04 15:22:34 crc kubenswrapper[4946]: I1204 15:22:34.604441 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-nq7lb"] Dec 04 15:22:34 crc kubenswrapper[4946]: I1204 15:22:34.686556 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-nx7vc"] Dec 04 15:22:34 crc kubenswrapper[4946]: I1204 15:22:34.712713 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-qmhjd"] Dec 04 15:22:35 crc kubenswrapper[4946]: W1204 15:22:35.132764 4946 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a94df46_46e0_4178_804b_1582e9cf7738.slice/crio-ae825e61d327c244ae052341322033397d9dfa0c99d3bdc4691a2ad46baffc09 WatchSource:0}: Error finding container ae825e61d327c244ae052341322033397d9dfa0c99d3bdc4691a2ad46baffc09: Status 404 returned error can't find the container with id ae825e61d327c244ae052341322033397d9dfa0c99d3bdc4691a2ad46baffc09 Dec 04 15:22:35 crc kubenswrapper[4946]: W1204 15:22:35.135283 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0181c3d1_7d0e_4959_a9cb_e4e00e01188a.slice/crio-59d38d343a594947bd044d3e3461b2eaab7341b97cdd31fbf57347198944291b WatchSource:0}: Error finding container 59d38d343a594947bd044d3e3461b2eaab7341b97cdd31fbf57347198944291b: Status 404 returned error can't find the container with id 59d38d343a594947bd044d3e3461b2eaab7341b97cdd31fbf57347198944291b Dec 04 15:22:35 crc kubenswrapper[4946]: W1204 15:22:35.136854 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2b934a8_2f23_4bcc_93bf_54adbdc19769.slice/crio-af1288c982425beb7b1e98cdb1d0c84d8bfdb336e6cd8822d63d10df5747d1bd WatchSource:0}: Error finding container af1288c982425beb7b1e98cdb1d0c84d8bfdb336e6cd8822d63d10df5747d1bd: Status 404 returned error can't find the container with id af1288c982425beb7b1e98cdb1d0c84d8bfdb336e6cd8822d63d10df5747d1bd Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.287469 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" event={"ID":"0181c3d1-7d0e-4959-a9cb-e4e00e01188a","Type":"ContainerStarted","Data":"59d38d343a594947bd044d3e3461b2eaab7341b97cdd31fbf57347198944291b"} Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.289359 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" event={"ID":"a2b934a8-2f23-4bcc-93bf-54adbdc19769","Type":"ContainerStarted","Data":"af1288c982425beb7b1e98cdb1d0c84d8bfdb336e6cd8822d63d10df5747d1bd"} Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.293107 4946 generic.go:334] "Generic (PLEG): container finished" podID="9c369924-f384-4ca1-b3ac-e1b334790f15" containerID="c8b74ff7a8981260885e87e8a20f8aab72faa5653bd3800e60fe6fd7c44e9c27" exitCode=0 Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.294468 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-hc6tt" event={"ID":"9c369924-f384-4ca1-b3ac-e1b334790f15","Type":"ContainerDied","Data":"c8b74ff7a8981260885e87e8a20f8aab72faa5653bd3800e60fe6fd7c44e9c27"} Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.298979 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-nx7vc" event={"ID":"3a94df46-46e0-4178-804b-1582e9cf7738","Type":"ContainerStarted","Data":"ae825e61d327c244ae052341322033397d9dfa0c99d3bdc4691a2ad46baffc09"} Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.302060 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" event={"ID":"de110676-babd-433c-b63e-cd66dc1bd512","Type":"ContainerStarted","Data":"b5019b25beb10bf708dc56bb55f187c7d2c44edc038bec49b75d5485b4d9688f"} Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.302295 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 
15:22:35.302332 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" podUID="de110676-babd-433c-b63e-cd66dc1bd512" containerName="dnsmasq-dns" containerID="cri-o://b5019b25beb10bf708dc56bb55f187c7d2c44edc038bec49b75d5485b4d9688f" gracePeriod=10 Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.305922 4946 generic.go:334] "Generic (PLEG): container finished" podID="bc305935-18fd-43f0-b1de-b588b49ea299" containerID="2dc6e4c7bb421bce35cf4b39ec7a020b3055a60fe92ce33d202140b49b1c7cf1" exitCode=0 Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.306013 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" event={"ID":"bc305935-18fd-43f0-b1de-b588b49ea299","Type":"ContainerDied","Data":"2dc6e4c7bb421bce35cf4b39ec7a020b3055a60fe92ce33d202140b49b1c7cf1"} Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.306051 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" event={"ID":"bc305935-18fd-43f0-b1de-b588b49ea299","Type":"ContainerDied","Data":"4373e404a072f5520c12b9873f895cc8c3cf830bdf85fe14aa884d0276e0d1ec"} Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.306063 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4373e404a072f5520c12b9873f895cc8c3cf830bdf85fe14aa884d0276e0d1ec" Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.346619 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" podStartSLOduration=18.875112055 podStartE2EDuration="40.346590259s" podCreationTimestamp="2025-12-04 15:21:55 +0000 UTC" firstStartedPulling="2025-12-04 15:21:56.696937436 +0000 UTC m=+1167.582981077" lastFinishedPulling="2025-12-04 15:22:18.16841564 +0000 UTC m=+1189.054459281" observedRunningTime="2025-12-04 15:22:35.340253565 +0000 UTC m=+1206.226297206" watchObservedRunningTime="2025-12-04 15:22:35.346590259 +0000 UTC m=+1206.232633900" Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.702783 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.843205 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc305935-18fd-43f0-b1de-b588b49ea299-dns-svc\") pod \"bc305935-18fd-43f0-b1de-b588b49ea299\" (UID: \"bc305935-18fd-43f0-b1de-b588b49ea299\") " Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.843881 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-td6qb\" (UniqueName: \"kubernetes.io/projected/bc305935-18fd-43f0-b1de-b588b49ea299-kube-api-access-td6qb\") pod \"bc305935-18fd-43f0-b1de-b588b49ea299\" (UID: \"bc305935-18fd-43f0-b1de-b588b49ea299\") " Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.844039 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc305935-18fd-43f0-b1de-b588b49ea299-config\") pod \"bc305935-18fd-43f0-b1de-b588b49ea299\" (UID: \"bc305935-18fd-43f0-b1de-b588b49ea299\") " Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.878643 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc305935-18fd-43f0-b1de-b588b49ea299-kube-api-access-td6qb" (OuterVolumeSpecName: "kube-api-access-td6qb") pod "bc305935-18fd-43f0-b1de-b588b49ea299" (UID: "bc305935-18fd-43f0-b1de-b588b49ea299"). InnerVolumeSpecName "kube-api-access-td6qb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.935634 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc305935-18fd-43f0-b1de-b588b49ea299-config" (OuterVolumeSpecName: "config") pod "bc305935-18fd-43f0-b1de-b588b49ea299" (UID: "bc305935-18fd-43f0-b1de-b588b49ea299"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.947580 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-td6qb\" (UniqueName: \"kubernetes.io/projected/bc305935-18fd-43f0-b1de-b588b49ea299-kube-api-access-td6qb\") on node \"crc\" DevicePath \"\"" Dec 04 15:22:35 crc kubenswrapper[4946]: I1204 15:22:35.947634 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc305935-18fd-43f0-b1de-b588b49ea299-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:22:36 crc kubenswrapper[4946]: I1204 15:22:36.327166 4946 generic.go:334] "Generic (PLEG): container finished" podID="de110676-babd-433c-b63e-cd66dc1bd512" containerID="b5019b25beb10bf708dc56bb55f187c7d2c44edc038bec49b75d5485b4d9688f" exitCode=0 Dec 04 15:22:36 crc kubenswrapper[4946]: I1204 15:22:36.327176 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" event={"ID":"de110676-babd-433c-b63e-cd66dc1bd512","Type":"ContainerDied","Data":"b5019b25beb10bf708dc56bb55f187c7d2c44edc038bec49b75d5485b4d9688f"} Dec 04 15:22:36 crc kubenswrapper[4946]: I1204 15:22:36.327407 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-tg6rs" Dec 04 15:22:36 crc kubenswrapper[4946]: I1204 15:22:36.342300 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc305935-18fd-43f0-b1de-b588b49ea299-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bc305935-18fd-43f0-b1de-b588b49ea299" (UID: "bc305935-18fd-43f0-b1de-b588b49ea299"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:22:36 crc kubenswrapper[4946]: I1204 15:22:36.357174 4946 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc305935-18fd-43f0-b1de-b588b49ea299-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 15:22:36 crc kubenswrapper[4946]: I1204 15:22:36.828453 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-tg6rs"] Dec 04 15:22:36 crc kubenswrapper[4946]: I1204 15:22:36.852596 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-tg6rs"] Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.338473 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"c6906a68-0819-41bc-a3d8-2ac76e77b67f","Type":"ContainerStarted","Data":"1ba834e8319aca45cc1983d8c76eaecff36a5d11748d42294f85f4e48c8aa06a"} Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.340911 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" event={"ID":"b0adb62a-e125-4612-8e57-74bab154a2c4","Type":"ContainerStarted","Data":"1df56a1e948ed930c422783d82f207d6f76525167622f66a4cb9944b6cfbb7ac"} Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.343152 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.347499 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" event={"ID":"c72f3e47-f551-4d7e-8978-cf453bc9a80d","Type":"ContainerStarted","Data":"b54e1124c9199a63944ada8a52267d9feb96bb67ecbf1dc1ddac18346c0193e6"} Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.347946 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.350147 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" podUID="c72f3e47-f551-4d7e-8978-cf453bc9a80d" containerName="gateway" probeResult="failure" output="Get \"https://10.217.0.123:8081/ready\": dial tcp 10.217.0.123:8081: connect: connection refused" Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.354199 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" event={"ID":"2ffa4fa2-c466-47f5-bca6-613ec9e52779","Type":"ContainerStarted","Data":"6f40dad66b5df2cf46fc969c6a38c7fdd4329ac69865b1f859ba0eb454a3b1ba"} Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.355383 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.370584 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-index-gateway-0" 
event={"ID":"b21846fc-0f45-4cae-aea6-b4e3f33ec03a","Type":"ContainerStarted","Data":"c506813594b22b5aa8b115200dc5913378b8a6d7ebfd41832a65253f384e5da8"} Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.371950 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.373322 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" podStartSLOduration=9.379225021 podStartE2EDuration="23.373291764s" podCreationTimestamp="2025-12-04 15:22:14 +0000 UTC" firstStartedPulling="2025-12-04 15:22:19.734331172 +0000 UTC m=+1190.620374813" lastFinishedPulling="2025-12-04 15:22:33.728397915 +0000 UTC m=+1204.614441556" observedRunningTime="2025-12-04 15:22:37.367986489 +0000 UTC m=+1208.254030150" watchObservedRunningTime="2025-12-04 15:22:37.373291764 +0000 UTC m=+1208.259335395" Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.382900 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"a304ef91-9673-43d6-8b91-0ba511961217","Type":"ContainerStarted","Data":"be84699f1e5f63867e6a154b20cb43db1c774d4574a42f2ce36344aae11ae21e"} Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.384433 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.387084 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-ingester-0" event={"ID":"decd9bb2-7749-48ff-b886-74e49bf5222d","Type":"ContainerStarted","Data":"a3c6231c298932ebea2bb6dc0a030c7932c5c6bb7de8d6a8cc0dd189aa48a64f"} Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.387202 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.396828 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" podStartSLOduration=8.337479098 podStartE2EDuration="22.396803178s" podCreationTimestamp="2025-12-04 15:22:15 +0000 UTC" firstStartedPulling="2025-12-04 15:22:19.734495576 +0000 UTC m=+1190.620539217" lastFinishedPulling="2025-12-04 15:22:33.793819656 +0000 UTC m=+1204.679863297" observedRunningTime="2025-12-04 15:22:37.390172866 +0000 UTC m=+1208.276216717" watchObservedRunningTime="2025-12-04 15:22:37.396803178 +0000 UTC m=+1208.282846819" Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.400336 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" event={"ID":"02db9740-8e77-440b-95f9-6a2968cd39fe","Type":"ContainerStarted","Data":"9a4ef806a0e27e12f6c1cace7b88fe48b59d1e9f4210b5852527de1b8f6f88e5"} Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.401514 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.404916 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"74261534-d493-4bb6-ac4f-e7196daaa71f","Type":"ContainerStarted","Data":"9777b7b8e2eff4064187c5d981597684c13202d5d4c78e0cb35d5c40c36b6757"} Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.404997 4946 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.406939 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" event={"ID":"55a6559d-165f-4fb0-ac08-a0ba07d02cac","Type":"ContainerStarted","Data":"bcc4498ef4878f9cc94851748038b0754be24fda5ea7bc8b1336b4b420bc1c84"} Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.407269 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.423554 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jtbm6" podStartSLOduration=7.675469753 podStartE2EDuration="22.423509239s" podCreationTimestamp="2025-12-04 15:22:15 +0000 UTC" firstStartedPulling="2025-12-04 15:22:19.721447839 +0000 UTC m=+1190.607491480" lastFinishedPulling="2025-12-04 15:22:34.469487325 +0000 UTC m=+1205.355530966" observedRunningTime="2025-12-04 15:22:37.413505495 +0000 UTC m=+1208.299549136" watchObservedRunningTime="2025-12-04 15:22:37.423509239 +0000 UTC m=+1208.309552880" Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.516187 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" podStartSLOduration=8.580359389 podStartE2EDuration="23.516151845s" podCreationTimestamp="2025-12-04 15:22:14 +0000 UTC" firstStartedPulling="2025-12-04 15:22:19.527845499 +0000 UTC m=+1190.413889140" lastFinishedPulling="2025-12-04 15:22:34.463637955 +0000 UTC m=+1205.349681596" observedRunningTime="2025-12-04 15:22:37.441297916 +0000 UTC m=+1208.327341557" watchObservedRunningTime="2025-12-04 15:22:37.516151845 +0000 UTC m=+1208.402195476" Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.521333 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc305935-18fd-43f0-b1de-b588b49ea299" path="/var/lib/kubelet/pods/bc305935-18fd-43f0-b1de-b588b49ea299/volumes" Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.533482 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-ingester-0" podStartSLOduration=8.950596975 podStartE2EDuration="23.533454939s" podCreationTimestamp="2025-12-04 15:22:14 +0000 UTC" firstStartedPulling="2025-12-04 15:22:19.765929127 +0000 UTC m=+1190.651972768" lastFinishedPulling="2025-12-04 15:22:34.348787091 +0000 UTC m=+1205.234830732" observedRunningTime="2025-12-04 15:22:37.470800954 +0000 UTC m=+1208.356844595" watchObservedRunningTime="2025-12-04 15:22:37.533454939 +0000 UTC m=+1208.419498570" Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.538275 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" podStartSLOduration=8.244184684 podStartE2EDuration="22.538258331s" podCreationTimestamp="2025-12-04 15:22:15 +0000 UTC" firstStartedPulling="2025-12-04 15:22:20.170566275 +0000 UTC m=+1191.056609916" lastFinishedPulling="2025-12-04 15:22:34.464639922 +0000 UTC m=+1205.350683563" observedRunningTime="2025-12-04 15:22:37.506282785 +0000 UTC m=+1208.392326426" watchObservedRunningTime="2025-12-04 15:22:37.538258331 +0000 UTC m=+1208.424301972" Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.551341 4946 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/cloudkitty-lokistack-index-gateway-0" podStartSLOduration=8.302792258 podStartE2EDuration="22.551316818s" podCreationTimestamp="2025-12-04 15:22:15 +0000 UTC" firstStartedPulling="2025-12-04 15:22:19.868517406 +0000 UTC m=+1190.754561047" lastFinishedPulling="2025-12-04 15:22:34.117041966 +0000 UTC m=+1205.003085607" observedRunningTime="2025-12-04 15:22:37.538327433 +0000 UTC m=+1208.424371074" watchObservedRunningTime="2025-12-04 15:22:37.551316818 +0000 UTC m=+1208.437360459" Dec 04 15:22:37 crc kubenswrapper[4946]: I1204 15:22:37.561077 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=23.423586116 podStartE2EDuration="37.561044355s" podCreationTimestamp="2025-12-04 15:22:00 +0000 UTC" firstStartedPulling="2025-12-04 15:22:19.656226874 +0000 UTC m=+1190.542270515" lastFinishedPulling="2025-12-04 15:22:33.793685113 +0000 UTC m=+1204.679728754" observedRunningTime="2025-12-04 15:22:37.559648746 +0000 UTC m=+1208.445692387" watchObservedRunningTime="2025-12-04 15:22:37.561044355 +0000 UTC m=+1208.447087996" Dec 04 15:22:38 crc kubenswrapper[4946]: I1204 15:22:38.427500 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qv4hw" event={"ID":"2734e466-178a-4344-bfac-9adb5e4492a7","Type":"ContainerStarted","Data":"35e0be699902c651b02cf459b3fb9e95cd2bf614d358cce93b5d8c446fdc39a3"} Dec 04 15:22:38 crc kubenswrapper[4946]: I1204 15:22:38.427626 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-qv4hw" Dec 04 15:22:38 crc kubenswrapper[4946]: I1204 15:22:38.435472 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f4481828-f464-47c6-a803-0c1962101efa","Type":"ContainerStarted","Data":"df1e7483f0fcfd97b3e3a7241aaae24f5fa48bebe91016ebba2b34b22f65ef80"} Dec 04 15:22:38 crc kubenswrapper[4946]: I1204 15:22:38.455923 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-c6jcc" Dec 04 15:22:38 crc kubenswrapper[4946]: I1204 15:22:38.458268 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-qv4hw" podStartSLOduration=16.930174656 podStartE2EDuration="31.458237808s" podCreationTimestamp="2025-12-04 15:22:07 +0000 UTC" firstStartedPulling="2025-12-04 15:22:19.141097541 +0000 UTC m=+1190.027141182" lastFinishedPulling="2025-12-04 15:22:33.669160693 +0000 UTC m=+1204.555204334" observedRunningTime="2025-12-04 15:22:38.448831371 +0000 UTC m=+1209.334875032" watchObservedRunningTime="2025-12-04 15:22:38.458237808 +0000 UTC m=+1209.344281459" Dec 04 15:22:41 crc kubenswrapper[4946]: I1204 15:22:41.743859 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" Dec 04 15:22:41 crc kubenswrapper[4946]: I1204 15:22:41.904403 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82sp6\" (UniqueName: \"kubernetes.io/projected/de110676-babd-433c-b63e-cd66dc1bd512-kube-api-access-82sp6\") pod \"de110676-babd-433c-b63e-cd66dc1bd512\" (UID: \"de110676-babd-433c-b63e-cd66dc1bd512\") " Dec 04 15:22:41 crc kubenswrapper[4946]: I1204 15:22:41.904568 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de110676-babd-433c-b63e-cd66dc1bd512-config\") pod \"de110676-babd-433c-b63e-cd66dc1bd512\" (UID: \"de110676-babd-433c-b63e-cd66dc1bd512\") " Dec 04 15:22:41 crc kubenswrapper[4946]: I1204 15:22:41.905088 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de110676-babd-433c-b63e-cd66dc1bd512-dns-svc\") pod \"de110676-babd-433c-b63e-cd66dc1bd512\" (UID: \"de110676-babd-433c-b63e-cd66dc1bd512\") " Dec 04 15:22:41 crc kubenswrapper[4946]: I1204 15:22:41.916331 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de110676-babd-433c-b63e-cd66dc1bd512-kube-api-access-82sp6" (OuterVolumeSpecName: "kube-api-access-82sp6") pod "de110676-babd-433c-b63e-cd66dc1bd512" (UID: "de110676-babd-433c-b63e-cd66dc1bd512"). InnerVolumeSpecName "kube-api-access-82sp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:22:41 crc kubenswrapper[4946]: I1204 15:22:41.990991 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de110676-babd-433c-b63e-cd66dc1bd512-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "de110676-babd-433c-b63e-cd66dc1bd512" (UID: "de110676-babd-433c-b63e-cd66dc1bd512"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:22:41 crc kubenswrapper[4946]: I1204 15:22:41.998345 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de110676-babd-433c-b63e-cd66dc1bd512-config" (OuterVolumeSpecName: "config") pod "de110676-babd-433c-b63e-cd66dc1bd512" (UID: "de110676-babd-433c-b63e-cd66dc1bd512"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.008401 4946 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de110676-babd-433c-b63e-cd66dc1bd512-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.008456 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82sp6\" (UniqueName: \"kubernetes.io/projected/de110676-babd-433c-b63e-cd66dc1bd512-kube-api-access-82sp6\") on node \"crc\" DevicePath \"\"" Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.008469 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de110676-babd-433c-b63e-cd66dc1bd512-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:22:42 crc kubenswrapper[4946]: E1204 15:22:42.459159 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-sb\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovsdbserver-sb-0" podUID="f89f1623-6a48-4db4-8059-940887046c8e" Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.483374 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"f89f1623-6a48-4db4-8059-940887046c8e","Type":"ContainerStarted","Data":"13e25cf19c81b92b4fb83055d4097657cf899398e3746071c21d38ed81740398"} Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.485646 4946 generic.go:334] "Generic (PLEG): container finished" podID="0181c3d1-7d0e-4959-a9cb-e4e00e01188a" containerID="f4842c4c185feab1d9782038b57ef47bad92e0a65116c39e7bdc62f0b656ae31" exitCode=0 Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.485705 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" event={"ID":"0181c3d1-7d0e-4959-a9cb-e4e00e01188a","Type":"ContainerDied","Data":"f4842c4c185feab1d9782038b57ef47bad92e0a65116c39e7bdc62f0b656ae31"} Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.491696 4946 generic.go:334] "Generic (PLEG): container finished" podID="a2b934a8-2f23-4bcc-93bf-54adbdc19769" containerID="73acd6ab19a95726068de4e802adfb20c0218f4fceebd598b13f13ee14143ccd" exitCode=0 Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.491783 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" event={"ID":"a2b934a8-2f23-4bcc-93bf-54adbdc19769","Type":"ContainerDied","Data":"73acd6ab19a95726068de4e802adfb20c0218f4fceebd598b13f13ee14143ccd"} Dec 04 15:22:42 crc kubenswrapper[4946]: E1204 15:22:42.498904 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-sb\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified\\\"\"" pod="openstack/ovsdbserver-sb-0" podUID="f89f1623-6a48-4db4-8059-940887046c8e" Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.506361 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-hc6tt" event={"ID":"9c369924-f384-4ca1-b3ac-e1b334790f15","Type":"ContainerStarted","Data":"56f4bd9d70ed7704507c9e42ffe248a3c377bd04334b0617cdf54ffcc4595ae9"} Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.513769 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-compactor-0" 
event={"ID":"47583dfd-ecd6-41d8-ac98-748683cd0ae5","Type":"ContainerStarted","Data":"a3331be503cadfc8da2f7575907d3cd9f18506ef9caa8e11dcc37c2448b7cb6f"} Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.514700 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.516623 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" event={"ID":"de110676-babd-433c-b63e-cd66dc1bd512","Type":"ContainerDied","Data":"b96ed6ca9c36e1b9c853a9027cb739cdd81e548a30e7dde1bfc48d692bc92965"} Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.516658 4946 scope.go:117] "RemoveContainer" containerID="b5019b25beb10bf708dc56bb55f187c7d2c44edc038bec49b75d5485b4d9688f" Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.516769 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.537134 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f81b9295-1cdf-44a6-afef-1380c1e3cf54","Type":"ContainerStarted","Data":"4bf23230ab38e503d2a2a21f70c8a2d5390fe12439cfed141fe9b11cd8012bdd"} Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.538989 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.602500 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=18.679152872 podStartE2EDuration="41.60248007s" podCreationTimestamp="2025-12-04 15:22:01 +0000 UTC" firstStartedPulling="2025-12-04 15:22:19.150455217 +0000 UTC m=+1190.036498858" lastFinishedPulling="2025-12-04 15:22:42.073782405 +0000 UTC m=+1212.959826056" observedRunningTime="2025-12-04 15:22:42.586934664 +0000 UTC m=+1213.472978305" watchObservedRunningTime="2025-12-04 15:22:42.60248007 +0000 UTC m=+1213.488523711" Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.628941 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-compactor-0" podStartSLOduration=13.245014318 podStartE2EDuration="27.628915404s" podCreationTimestamp="2025-12-04 15:22:15 +0000 UTC" firstStartedPulling="2025-12-04 15:22:20.07977493 +0000 UTC m=+1190.965818571" lastFinishedPulling="2025-12-04 15:22:34.463676016 +0000 UTC m=+1205.349719657" observedRunningTime="2025-12-04 15:22:42.614352265 +0000 UTC m=+1213.500395906" watchObservedRunningTime="2025-12-04 15:22:42.628915404 +0000 UTC m=+1213.514959035" Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.684687 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-jbxrv"] Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.686108 4946 scope.go:117] "RemoveContainer" containerID="e355084faa8c8085d2d00d13a691fc8aa13716532004ba82dfaa198151393107" Dec 04 15:22:42 crc kubenswrapper[4946]: I1204 15:22:42.697257 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-jbxrv"] Dec 04 15:22:43 crc kubenswrapper[4946]: I1204 15:22:43.475181 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de110676-babd-433c-b63e-cd66dc1bd512" path="/var/lib/kubelet/pods/de110676-babd-433c-b63e-cd66dc1bd512/volumes" Dec 04 15:22:43 crc kubenswrapper[4946]: I1204 
15:22:43.555387 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" event={"ID":"a2b934a8-2f23-4bcc-93bf-54adbdc19769","Type":"ContainerStarted","Data":"6c6191e413c0bd4c91a90fe6f4a5e565cf5a1173dfeba987715692f002e6da58"} Dec 04 15:22:43 crc kubenswrapper[4946]: I1204 15:22:43.556788 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" Dec 04 15:22:43 crc kubenswrapper[4946]: I1204 15:22:43.561718 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-hc6tt" event={"ID":"9c369924-f384-4ca1-b3ac-e1b334790f15","Type":"ContainerStarted","Data":"d6b750fa9703791994cf700f685573af994baca6c771a8cf28cd6e27cef4db5b"} Dec 04 15:22:43 crc kubenswrapper[4946]: I1204 15:22:43.561899 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-hc6tt" Dec 04 15:22:43 crc kubenswrapper[4946]: I1204 15:22:43.563392 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-nx7vc" event={"ID":"3a94df46-46e0-4178-804b-1582e9cf7738","Type":"ContainerStarted","Data":"b111431f9ccd9ae225f6ab29fc1a8ec9f80e069153e207971353c507a2a11af4"} Dec 04 15:22:43 crc kubenswrapper[4946]: I1204 15:22:43.566226 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-hc6tt" Dec 04 15:22:43 crc kubenswrapper[4946]: I1204 15:22:43.566684 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"c6906a68-0819-41bc-a3d8-2ac76e77b67f","Type":"ContainerStarted","Data":"4eb412e79b3c0fbee8bc964b354b3f3ca2885040c2be8c47a819be1b37bcdea4"} Dec 04 15:22:43 crc kubenswrapper[4946]: I1204 15:22:43.569298 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" event={"ID":"0181c3d1-7d0e-4959-a9cb-e4e00e01188a","Type":"ContainerStarted","Data":"012375251ecb14ff1f4a9357bc9da82be53eb5622f52eda1bd8d8d00c9fc5f14"} Dec 04 15:22:43 crc kubenswrapper[4946]: I1204 15:22:43.591435 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" podStartSLOduration=13.591410055 podStartE2EDuration="13.591410055s" podCreationTimestamp="2025-12-04 15:22:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:22:43.580788884 +0000 UTC m=+1214.466832525" watchObservedRunningTime="2025-12-04 15:22:43.591410055 +0000 UTC m=+1214.477453696" Dec 04 15:22:43 crc kubenswrapper[4946]: I1204 15:22:43.615128 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=15.296274098 podStartE2EDuration="38.615083833s" podCreationTimestamp="2025-12-04 15:22:05 +0000 UTC" firstStartedPulling="2025-12-04 15:22:18.889574775 +0000 UTC m=+1189.775618416" lastFinishedPulling="2025-12-04 15:22:42.20838451 +0000 UTC m=+1213.094428151" observedRunningTime="2025-12-04 15:22:43.601696197 +0000 UTC m=+1214.487739848" watchObservedRunningTime="2025-12-04 15:22:43.615083833 +0000 UTC m=+1214.501127474" Dec 04 15:22:43 crc kubenswrapper[4946]: I1204 15:22:43.659940 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-hc6tt" podStartSLOduration=22.765900195 podStartE2EDuration="36.659919521s" podCreationTimestamp="2025-12-04 15:22:07 +0000 UTC" firstStartedPulling="2025-12-04 
15:22:19.383928288 +0000 UTC m=+1190.269971939" lastFinishedPulling="2025-12-04 15:22:33.277947634 +0000 UTC m=+1204.163991265" observedRunningTime="2025-12-04 15:22:43.655091149 +0000 UTC m=+1214.541134800" watchObservedRunningTime="2025-12-04 15:22:43.659919521 +0000 UTC m=+1214.545963162" Dec 04 15:22:43 crc kubenswrapper[4946]: I1204 15:22:43.685478 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-nx7vc" podStartSLOduration=6.617731521 podStartE2EDuration="13.68544858s" podCreationTimestamp="2025-12-04 15:22:30 +0000 UTC" firstStartedPulling="2025-12-04 15:22:35.136082316 +0000 UTC m=+1206.022125957" lastFinishedPulling="2025-12-04 15:22:42.203799375 +0000 UTC m=+1213.089843016" observedRunningTime="2025-12-04 15:22:43.676603958 +0000 UTC m=+1214.562647589" watchObservedRunningTime="2025-12-04 15:22:43.68544858 +0000 UTC m=+1214.571492221" Dec 04 15:22:43 crc kubenswrapper[4946]: I1204 15:22:43.710588 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" podStartSLOduration=13.710560517 podStartE2EDuration="13.710560517s" podCreationTimestamp="2025-12-04 15:22:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:22:43.701983292 +0000 UTC m=+1214.588026933" watchObservedRunningTime="2025-12-04 15:22:43.710560517 +0000 UTC m=+1214.596604168" Dec 04 15:22:44 crc kubenswrapper[4946]: I1204 15:22:44.588715 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:45 crc kubenswrapper[4946]: I1204 15:22:45.491705 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 04 15:22:45 crc kubenswrapper[4946]: I1204 15:22:45.522330 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 04 15:22:45 crc kubenswrapper[4946]: I1204 15:22:45.568248 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 04 15:22:45 crc kubenswrapper[4946]: I1204 15:22:45.602085 4946 generic.go:334] "Generic (PLEG): container finished" podID="a304ef91-9673-43d6-8b91-0ba511961217" containerID="be84699f1e5f63867e6a154b20cb43db1c774d4574a42f2ce36344aae11ae21e" exitCode=0 Dec 04 15:22:45 crc kubenswrapper[4946]: I1204 15:22:45.602245 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"a304ef91-9673-43d6-8b91-0ba511961217","Type":"ContainerDied","Data":"be84699f1e5f63867e6a154b20cb43db1c774d4574a42f2ce36344aae11ae21e"} Dec 04 15:22:45 crc kubenswrapper[4946]: I1204 15:22:45.612820 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"f89f1623-6a48-4db4-8059-940887046c8e","Type":"ContainerStarted","Data":"d1113f22397db85350bffcbee539a6c5591b347284e73699af4c516047a60f93"} Dec 04 15:22:45 crc kubenswrapper[4946]: I1204 15:22:45.613845 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 04 15:22:45 crc kubenswrapper[4946]: I1204 15:22:45.694365 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=11.623531266 podStartE2EDuration="36.694332189s" podCreationTimestamp="2025-12-04 15:22:09 +0000 UTC" firstStartedPulling="2025-12-04 15:22:19.656357017 
+0000 UTC m=+1190.542400658" lastFinishedPulling="2025-12-04 15:22:44.72715794 +0000 UTC m=+1215.613201581" observedRunningTime="2025-12-04 15:22:45.68851893 +0000 UTC m=+1216.574562571" watchObservedRunningTime="2025-12-04 15:22:45.694332189 +0000 UTC m=+1216.580375840" Dec 04 15:22:45 crc kubenswrapper[4946]: I1204 15:22:45.703499 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 04 15:22:45 crc kubenswrapper[4946]: I1204 15:22:45.711014 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:45 crc kubenswrapper[4946]: I1204 15:22:45.812571 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-666b6646f7-jbxrv" podUID="de110676-babd-433c-b63e-cd66dc1bd512" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.106:5353: i/o timeout" Dec 04 15:22:46 crc kubenswrapper[4946]: I1204 15:22:46.627581 4946 generic.go:334] "Generic (PLEG): container finished" podID="f4481828-f464-47c6-a803-0c1962101efa" containerID="df1e7483f0fcfd97b3e3a7241aaae24f5fa48bebe91016ebba2b34b22f65ef80" exitCode=0 Dec 04 15:22:46 crc kubenswrapper[4946]: I1204 15:22:46.627660 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f4481828-f464-47c6-a803-0c1962101efa","Type":"ContainerDied","Data":"df1e7483f0fcfd97b3e3a7241aaae24f5fa48bebe91016ebba2b34b22f65ef80"} Dec 04 15:22:46 crc kubenswrapper[4946]: I1204 15:22:46.710934 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:47 crc kubenswrapper[4946]: I1204 15:22:47.648797 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"44a85e36-b029-4450-b8aa-11bf910d8139","Type":"ContainerStarted","Data":"2cebbb5b996075bbd87e9b370d70b42f610efde6482fd557971184eeae795bca"} Dec 04 15:22:49 crc kubenswrapper[4946]: I1204 15:22:49.681525 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"a304ef91-9673-43d6-8b91-0ba511961217","Type":"ContainerStarted","Data":"edcb4d484c60a9a7a20a372378420abbea44a1fc1f8f955dbd5175aa695d8b1a"} Dec 04 15:22:49 crc kubenswrapper[4946]: I1204 15:22:49.684048 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"43d26c42-eba9-4e5c-bd2d-7cdf7074a176","Type":"ContainerStarted","Data":"bafa86a10af48a7a0c9a2ceb67a815d149a59842159d817231bc75ff582ab6c7"} Dec 04 15:22:49 crc kubenswrapper[4946]: I1204 15:22:49.765847 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:50 crc kubenswrapper[4946]: I1204 15:22:50.759386 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.049319 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.055772 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 04 15:22:51 crc kubenswrapper[4946]: E1204 15:22:51.056423 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc305935-18fd-43f0-b1de-b588b49ea299" containerName="init" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.056449 4946 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="bc305935-18fd-43f0-b1de-b588b49ea299" containerName="init" Dec 04 15:22:51 crc kubenswrapper[4946]: E1204 15:22:51.056473 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de110676-babd-433c-b63e-cd66dc1bd512" containerName="dnsmasq-dns" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.056496 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="de110676-babd-433c-b63e-cd66dc1bd512" containerName="dnsmasq-dns" Dec 04 15:22:51 crc kubenswrapper[4946]: E1204 15:22:51.056544 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de110676-babd-433c-b63e-cd66dc1bd512" containerName="init" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.056554 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="de110676-babd-433c-b63e-cd66dc1bd512" containerName="init" Dec 04 15:22:51 crc kubenswrapper[4946]: E1204 15:22:51.056572 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc305935-18fd-43f0-b1de-b588b49ea299" containerName="dnsmasq-dns" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.056585 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc305935-18fd-43f0-b1de-b588b49ea299" containerName="dnsmasq-dns" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.056830 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc305935-18fd-43f0-b1de-b588b49ea299" containerName="dnsmasq-dns" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.056874 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="de110676-babd-433c-b63e-cd66dc1bd512" containerName="dnsmasq-dns" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.058624 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.061777 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-pghsq" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.068680 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.083576 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.090088 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.112993 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.156326 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.156543 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.156624 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.156709 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-config\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.156838 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfjjp\" (UniqueName: \"kubernetes.io/projected/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-kube-api-access-gfjjp\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.156897 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-scripts\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.156994 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.260095 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.260256 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.260319 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.260377 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-config\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.260456 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfjjp\" (UniqueName: \"kubernetes.io/projected/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-kube-api-access-gfjjp\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc 
kubenswrapper[4946]: I1204 15:22:51.260495 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-scripts\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.260545 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.261379 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.263040 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-config\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.263082 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-scripts\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.352298 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.359836 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.482556 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-qmhjd"] Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.566041 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.567912 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.568967 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfjjp\" (UniqueName: \"kubernetes.io/projected/bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7-kube-api-access-gfjjp\") pod \"ovn-northd-0\" (UID: \"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7\") " pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.684826 4946 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 04 15:22:51 crc kubenswrapper[4946]: I1204 15:22:51.707913 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" podUID="a2b934a8-2f23-4bcc-93bf-54adbdc19769" containerName="dnsmasq-dns" containerID="cri-o://6c6191e413c0bd4c91a90fe6f4a5e565cf5a1173dfeba987715692f002e6da58" gracePeriod=10 Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.365641 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-q8nbg"] Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.369058 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.374270 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.399474 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-q8nbg"] Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.481695 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.481774 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.481836 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.482978 4946 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8a7e012c140a228f19f6cb14e9f9072b524033b54270efdccbdb4e5e1c52ef9d"} pod="openshift-machine-config-operator/machine-config-daemon-qhv79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.483043 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" containerID="cri-o://8a7e012c140a228f19f6cb14e9f9072b524033b54270efdccbdb4e5e1c52ef9d" gracePeriod=600 Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.499661 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzd8c\" (UniqueName: \"kubernetes.io/projected/b3d07afb-2b13-4962-a233-2fc779f21f68-kube-api-access-dzd8c\") pod \"dnsmasq-dns-698758b865-q8nbg\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.500202 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-dns-svc\") pod \"dnsmasq-dns-698758b865-q8nbg\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.500294 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-q8nbg\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.500383 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-config\") pod \"dnsmasq-dns-698758b865-q8nbg\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.500430 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-q8nbg\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.602983 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-q8nbg\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.603104 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzd8c\" (UniqueName: \"kubernetes.io/projected/b3d07afb-2b13-4962-a233-2fc779f21f68-kube-api-access-dzd8c\") pod \"dnsmasq-dns-698758b865-q8nbg\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.603238 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-dns-svc\") pod \"dnsmasq-dns-698758b865-q8nbg\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.603263 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-q8nbg\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.603288 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-config\") pod \"dnsmasq-dns-698758b865-q8nbg\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.604207 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-config\") pod \"dnsmasq-dns-698758b865-q8nbg\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.604251 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-q8nbg\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.607899 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-dns-svc\") pod \"dnsmasq-dns-698758b865-q8nbg\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.608533 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-q8nbg\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.667222 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzd8c\" (UniqueName: \"kubernetes.io/projected/b3d07afb-2b13-4962-a233-2fc779f21f68-kube-api-access-dzd8c\") pod \"dnsmasq-dns-698758b865-q8nbg\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.698758 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.726589 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"a304ef91-9673-43d6-8b91-0ba511961217","Type":"ContainerStarted","Data":"f7d289c14be41a58cfde4aef255b17f26a62236cb6bb66ba1896220ab824d129"} Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.727283 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.732239 4946 generic.go:334] "Generic (PLEG): container finished" podID="a2b934a8-2f23-4bcc-93bf-54adbdc19769" containerID="6c6191e413c0bd4c91a90fe6f4a5e565cf5a1173dfeba987715692f002e6da58" exitCode=0 Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.732352 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" event={"ID":"a2b934a8-2f23-4bcc-93bf-54adbdc19769","Type":"ContainerDied","Data":"6c6191e413c0bd4c91a90fe6f4a5e565cf5a1173dfeba987715692f002e6da58"} Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.733608 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.737166 4946 generic.go:334] "Generic (PLEG): container finished" podID="44a85e36-b029-4450-b8aa-11bf910d8139" containerID="2cebbb5b996075bbd87e9b370d70b42f610efde6482fd557971184eeae795bca" exitCode=0 Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.737232 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"44a85e36-b029-4450-b8aa-11bf910d8139","Type":"ContainerDied","Data":"2cebbb5b996075bbd87e9b370d70b42f610efde6482fd557971184eeae795bca"} Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.743437 4946 generic.go:334] "Generic (PLEG): container finished" podID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerID="8a7e012c140a228f19f6cb14e9f9072b524033b54270efdccbdb4e5e1c52ef9d" exitCode=0 Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.743491 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerDied","Data":"8a7e012c140a228f19f6cb14e9f9072b524033b54270efdccbdb4e5e1c52ef9d"} Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.743541 4946 scope.go:117] "RemoveContainer" containerID="53eeb1a5a8af1654e1978db4066dd9d62d695280b47fdbadb0ee39d16803c85c" Dec 04 15:22:52 crc kubenswrapper[4946]: I1204 15:22:52.756002 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=19.431112238 podStartE2EDuration="50.755975275s" podCreationTimestamp="2025-12-04 15:22:02 +0000 UTC" firstStartedPulling="2025-12-04 15:22:17.694959748 +0000 UTC m=+1188.581003389" lastFinishedPulling="2025-12-04 15:22:49.019822785 +0000 UTC m=+1219.905866426" observedRunningTime="2025-12-04 15:22:52.754684199 +0000 UTC m=+1223.640727840" watchObservedRunningTime="2025-12-04 15:22:52.755975275 +0000 UTC m=+1223.642018916" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.543301 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.559504 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/swift-storage-0"] Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.560057 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.566314 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.566658 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.566799 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.566950 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-8898n" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.636642 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.636941 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwkfl\" (UniqueName: \"kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-kube-api-access-hwkfl\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.637182 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c2ce540a-9d75-414e-bfee-25f5e17dab6d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2ce540a-9d75-414e-bfee-25f5e17dab6d\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.637231 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b7676ab4-212c-4e17-a84a-0979a65936d1-cache\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.637334 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b7676ab4-212c-4e17-a84a-0979a65936d1-lock\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.758742 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b7676ab4-212c-4e17-a84a-0979a65936d1-lock\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.759138 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.759250 4946 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwkfl\" (UniqueName: \"kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-kube-api-access-hwkfl\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.759483 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c2ce540a-9d75-414e-bfee-25f5e17dab6d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2ce540a-9d75-414e-bfee-25f5e17dab6d\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.759579 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b7676ab4-212c-4e17-a84a-0979a65936d1-cache\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.760205 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b7676ab4-212c-4e17-a84a-0979a65936d1-cache\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:22:53 crc kubenswrapper[4946]: E1204 15:22:53.760479 4946 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 04 15:22:53 crc kubenswrapper[4946]: E1204 15:22:53.760547 4946 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 04 15:22:53 crc kubenswrapper[4946]: E1204 15:22:53.760682 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift podName:b7676ab4-212c-4e17-a84a-0979a65936d1 nodeName:}" failed. No retries permitted until 2025-12-04 15:22:54.26062077 +0000 UTC m=+1225.146664411 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift") pod "swift-storage-0" (UID: "b7676ab4-212c-4e17-a84a-0979a65936d1") : configmap "swift-ring-files" not found Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.761918 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b7676ab4-212c-4e17-a84a-0979a65936d1-lock\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.772882 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.772932 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c2ce540a-9d75-414e-bfee-25f5e17dab6d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2ce540a-9d75-414e-bfee-25f5e17dab6d\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2e6f9e8ea680aac253350a578dcb88ad666cdfdffd773c3ea1384f638d46b5f2/globalmount\"" pod="openstack/swift-storage-0" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.791192 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwkfl\" (UniqueName: \"kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-kube-api-access-hwkfl\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:22:53 crc kubenswrapper[4946]: I1204 15:22:53.838781 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c2ce540a-9d75-414e-bfee-25f5e17dab6d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2ce540a-9d75-414e-bfee-25f5e17dab6d\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:22:54 crc kubenswrapper[4946]: I1204 15:22:54.274414 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:22:54 crc kubenswrapper[4946]: E1204 15:22:54.274735 4946 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 04 15:22:54 crc kubenswrapper[4946]: E1204 15:22:54.275708 4946 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 04 15:22:54 crc kubenswrapper[4946]: E1204 15:22:54.275856 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift podName:b7676ab4-212c-4e17-a84a-0979a65936d1 nodeName:}" failed. No retries permitted until 2025-12-04 15:22:55.275819686 +0000 UTC m=+1226.161863327 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift") pod "swift-storage-0" (UID: "b7676ab4-212c-4e17-a84a-0979a65936d1") : configmap "swift-ring-files" not found Dec 04 15:22:54 crc kubenswrapper[4946]: I1204 15:22:54.780611 4946 generic.go:334] "Generic (PLEG): container finished" podID="43d26c42-eba9-4e5c-bd2d-7cdf7074a176" containerID="bafa86a10af48a7a0c9a2ceb67a815d149a59842159d817231bc75ff582ab6c7" exitCode=0 Dec 04 15:22:54 crc kubenswrapper[4946]: I1204 15:22:54.780720 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"43d26c42-eba9-4e5c-bd2d-7cdf7074a176","Type":"ContainerDied","Data":"bafa86a10af48a7a0c9a2ceb67a815d149a59842159d817231bc75ff582ab6c7"} Dec 04 15:22:55 crc kubenswrapper[4946]: I1204 15:22:55.170679 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-76w8c" Dec 04 15:22:55 crc kubenswrapper[4946]: I1204 15:22:55.334073 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:22:55 crc kubenswrapper[4946]: E1204 15:22:55.334385 4946 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 04 15:22:55 crc kubenswrapper[4946]: E1204 15:22:55.334572 4946 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 04 15:22:55 crc kubenswrapper[4946]: E1204 15:22:55.334674 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift podName:b7676ab4-212c-4e17-a84a-0979a65936d1 nodeName:}" failed. No retries permitted until 2025-12-04 15:22:57.334638894 +0000 UTC m=+1228.220682535 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift") pod "swift-storage-0" (UID: "b7676ab4-212c-4e17-a84a-0979a65936d1") : configmap "swift-ring-files" not found Dec 04 15:22:55 crc kubenswrapper[4946]: I1204 15:22:55.466873 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-wwslq" Dec 04 15:22:55 crc kubenswrapper[4946]: I1204 15:22:55.672630 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn" Dec 04 15:22:56 crc kubenswrapper[4946]: I1204 15:22:56.047798 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" podUID="a2b934a8-2f23-4bcc-93bf-54adbdc19769" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: connect: connection refused" Dec 04 15:22:56 crc kubenswrapper[4946]: I1204 15:22:56.340706 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="decd9bb2-7749-48ff-b886-74e49bf5222d" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 04 15:22:56 crc kubenswrapper[4946]: I1204 15:22:56.525099 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 04 15:22:56 crc kubenswrapper[4946]: I1204 15:22:56.686480 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.124079 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.214652 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2b934a8-2f23-4bcc-93bf-54adbdc19769-dns-svc\") pod \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\" (UID: \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\") " Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.214715 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a2b934a8-2f23-4bcc-93bf-54adbdc19769-ovsdbserver-nb\") pod \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\" (UID: \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\") " Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.214743 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2b934a8-2f23-4bcc-93bf-54adbdc19769-config\") pod \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\" (UID: \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\") " Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.214822 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlcpb\" (UniqueName: \"kubernetes.io/projected/a2b934a8-2f23-4bcc-93bf-54adbdc19769-kube-api-access-rlcpb\") pod \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\" (UID: \"a2b934a8-2f23-4bcc-93bf-54adbdc19769\") " Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.225645 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2b934a8-2f23-4bcc-93bf-54adbdc19769-kube-api-access-rlcpb" (OuterVolumeSpecName: "kube-api-access-rlcpb") pod "a2b934a8-2f23-4bcc-93bf-54adbdc19769" (UID: "a2b934a8-2f23-4bcc-93bf-54adbdc19769"). InnerVolumeSpecName "kube-api-access-rlcpb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.321215 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlcpb\" (UniqueName: \"kubernetes.io/projected/a2b934a8-2f23-4bcc-93bf-54adbdc19769-kube-api-access-rlcpb\") on node \"crc\" DevicePath \"\"" Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.327242 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2b934a8-2f23-4bcc-93bf-54adbdc19769-config" (OuterVolumeSpecName: "config") pod "a2b934a8-2f23-4bcc-93bf-54adbdc19769" (UID: "a2b934a8-2f23-4bcc-93bf-54adbdc19769"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.347802 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2b934a8-2f23-4bcc-93bf-54adbdc19769-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a2b934a8-2f23-4bcc-93bf-54adbdc19769" (UID: "a2b934a8-2f23-4bcc-93bf-54adbdc19769"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.359336 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2b934a8-2f23-4bcc-93bf-54adbdc19769-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a2b934a8-2f23-4bcc-93bf-54adbdc19769" (UID: "a2b934a8-2f23-4bcc-93bf-54adbdc19769"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.419211 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.425471 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.425582 4946 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2b934a8-2f23-4bcc-93bf-54adbdc19769-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.425595 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a2b934a8-2f23-4bcc-93bf-54adbdc19769-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.425606 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2b934a8-2f23-4bcc-93bf-54adbdc19769-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:22:57 crc kubenswrapper[4946]: E1204 15:22:57.425816 4946 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 04 15:22:57 crc kubenswrapper[4946]: E1204 15:22:57.425831 4946 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 04 15:22:57 crc kubenswrapper[4946]: E1204 15:22:57.425881 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift podName:b7676ab4-212c-4e17-a84a-0979a65936d1 nodeName:}" failed. No retries permitted until 2025-12-04 15:23:01.425865619 +0000 UTC m=+1232.311909260 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift") pod "swift-storage-0" (UID: "b7676ab4-212c-4e17-a84a-0979a65936d1") : configmap "swift-ring-files" not found Dec 04 15:22:57 crc kubenswrapper[4946]: W1204 15:22:57.436084 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3d07afb_2b13_4962_a233_2fc779f21f68.slice/crio-f7acbf6b1a34060f11c771df28cd2280b07f824ca149417a128b764939aefba4 WatchSource:0}: Error finding container f7acbf6b1a34060f11c771df28cd2280b07f824ca149417a128b764939aefba4: Status 404 returned error can't find the container with id f7acbf6b1a34060f11c771df28cd2280b07f824ca149417a128b764939aefba4 Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.445288 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-q8nbg"] Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.569266 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-rnp6k"] Dec 04 15:22:57 crc kubenswrapper[4946]: E1204 15:22:57.570315 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2b934a8-2f23-4bcc-93bf-54adbdc19769" containerName="dnsmasq-dns" Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.570336 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2b934a8-2f23-4bcc-93bf-54adbdc19769" containerName="dnsmasq-dns" Dec 04 15:22:57 crc kubenswrapper[4946]: E1204 15:22:57.570353 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2b934a8-2f23-4bcc-93bf-54adbdc19769" containerName="init" Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.570360 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2b934a8-2f23-4bcc-93bf-54adbdc19769" containerName="init" Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.570586 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2b934a8-2f23-4bcc-93bf-54adbdc19769" containerName="dnsmasq-dns" Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.571539 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.576002 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.576232 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.576347 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.593373 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-rnp6k"]
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.630291 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-ring-data-devices\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.630396 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-combined-ca-bundle\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.630434 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-dispersionconf\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.630481 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-etc-swift\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.630549 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-swiftconf\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.630581 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjdk9\" (UniqueName: \"kubernetes.io/projected/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-kube-api-access-pjdk9\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.630614 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-scripts\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.732474 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-swiftconf\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.732530 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjdk9\" (UniqueName: \"kubernetes.io/projected/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-kube-api-access-pjdk9\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.732560 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-scripts\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.732640 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-ring-data-devices\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.732698 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-combined-ca-bundle\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.732725 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-dispersionconf\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.732762 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-etc-swift\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.733344 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-etc-swift\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.734085 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-ring-data-devices\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.735728 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-scripts\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.738192 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-swiftconf\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.739009 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-combined-ca-bundle\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.740170 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-dispersionconf\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.754777 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjdk9\" (UniqueName: \"kubernetes.io/projected/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-kube-api-access-pjdk9\") pod \"swift-ring-rebalance-rnp6k\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.896835 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"43d26c42-eba9-4e5c-bd2d-7cdf7074a176","Type":"ContainerStarted","Data":"491c17e8cc2133676bca55ad1ba5981be26e796b1973432d083a7fb1d8e98a31"}
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.902440 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-q8nbg" event={"ID":"b3d07afb-2b13-4962-a233-2fc779f21f68","Type":"ContainerStarted","Data":"f7acbf6b1a34060f11c771df28cd2280b07f824ca149417a128b764939aefba4"}
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.907396 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"a4672e78e5cc3d2ad9bab9f7368c2628b00d850ecbb6c4792dcaf037af3ed10c"}
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.913728 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f4481828-f464-47c6-a803-0c1962101efa","Type":"ContainerStarted","Data":"0193c8b0a40b2db19afee087737226c05fe497fed21a90bc904a851c48f76fc1"}
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.923957 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.924014 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-qmhjd" event={"ID":"a2b934a8-2f23-4bcc-93bf-54adbdc19769","Type":"ContainerDied","Data":"af1288c982425beb7b1e98cdb1d0c84d8bfdb336e6cd8822d63d10df5747d1bd"}
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.924102 4946 scope.go:117] "RemoveContainer" containerID="6c6191e413c0bd4c91a90fe6f4a5e565cf5a1173dfeba987715692f002e6da58"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.934103 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"44a85e36-b029-4450-b8aa-11bf910d8139","Type":"ContainerStarted","Data":"1d1c75a549d0f5e71c6dc022002de22585f146e02e04394a16dca3c9bf66c3a9"}
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.937386 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=-9223371975.917412 podStartE2EDuration="1m0.937363943s" podCreationTimestamp="2025-12-04 15:21:57 +0000 UTC" firstStartedPulling="2025-12-04 15:22:18.624101546 +0000 UTC m=+1189.510145187" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:22:57.923089952 +0000 UTC m=+1228.809133593" watchObservedRunningTime="2025-12-04 15:22:57.937363943 +0000 UTC m=+1228.823407584"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.938748 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7","Type":"ContainerStarted","Data":"fe96947df0c75f4304516418b7ff8775dd4014e98cf111680ba2a1d9e909abf4"}
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.963097 4946 scope.go:117] "RemoveContainer" containerID="73acd6ab19a95726068de4e802adfb20c0218f4fceebd598b13f13ee14143ccd"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.984163 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-rnp6k"
Dec 04 15:22:57 crc kubenswrapper[4946]: I1204 15:22:57.984620 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=31.520032138 podStartE2EDuration="59.984599106s" podCreationTimestamp="2025-12-04 15:21:58 +0000 UTC" firstStartedPulling="2025-12-04 15:22:18.435667997 +0000 UTC m=+1189.321711638" lastFinishedPulling="2025-12-04 15:22:46.900234965 +0000 UTC m=+1217.786278606" observedRunningTime="2025-12-04 15:22:57.975333952 +0000 UTC m=+1228.861377593" watchObservedRunningTime="2025-12-04 15:22:57.984599106 +0000 UTC m=+1228.870642737"
Dec 04 15:22:58 crc kubenswrapper[4946]: I1204 15:22:58.002023 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-qmhjd"]
Dec 04 15:22:58 crc kubenswrapper[4946]: I1204 15:22:58.011655 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-qmhjd"]
Dec 04 15:22:58 crc kubenswrapper[4946]: I1204 15:22:58.387453 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-rnp6k"]
Dec 04 15:22:58 crc kubenswrapper[4946]: I1204 15:22:58.954804 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-rnp6k" event={"ID":"f2266dde-4870-46a5-9c4a-c348c6c4d4ed","Type":"ContainerStarted","Data":"14d2c52052252d5a87359a60335c104e589cb8029a0313c9b763b638dac660ef"}
Dec 04 15:22:58 crc kubenswrapper[4946]: I1204 15:22:58.958466 4946 generic.go:334] "Generic (PLEG): container finished" podID="b3d07afb-2b13-4962-a233-2fc779f21f68" containerID="035f3b1e9084cb9762645b18e5340a5aecb57620cb9f5abb332e59e6c93cdd19" exitCode=0
Dec 04 15:22:58 crc kubenswrapper[4946]: I1204 15:22:58.959697 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-q8nbg" event={"ID":"b3d07afb-2b13-4962-a233-2fc779f21f68","Type":"ContainerDied","Data":"035f3b1e9084cb9762645b18e5340a5aecb57620cb9f5abb332e59e6c93cdd19"}
Dec 04 15:22:59 crc kubenswrapper[4946]: I1204 15:22:59.056658 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Dec 04 15:22:59 crc kubenswrapper[4946]: I1204 15:22:59.056735 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Dec 04 15:22:59 crc kubenswrapper[4946]: I1204 15:22:59.467098 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2b934a8-2f23-4bcc-93bf-54adbdc19769" path="/var/lib/kubelet/pods/a2b934a8-2f23-4bcc-93bf-54adbdc19769/volumes"
Dec 04 15:22:59 crc kubenswrapper[4946]: I1204 15:22:59.978911 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-q8nbg" event={"ID":"b3d07afb-2b13-4962-a233-2fc779f21f68","Type":"ContainerStarted","Data":"cf3a54599d48a3aa5ac927ea96645e60a0b407ae37ce540b2dcc44382f2c8032"}
Dec 04 15:22:59 crc kubenswrapper[4946]: I1204 15:22:59.979722 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-q8nbg"
Dec 04 15:23:00 crc kubenswrapper[4946]: I1204 15:23:00.012264 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-q8nbg" podStartSLOduration=8.012242769 podStartE2EDuration="8.012242769s" podCreationTimestamp="2025-12-04 15:22:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:23:00.005588027 +0000 UTC m=+1230.891631668" watchObservedRunningTime="2025-12-04 15:23:00.012242769 +0000 UTC m=+1230.898286410"
Dec 04 15:23:00 crc kubenswrapper[4946]: I1204 15:23:00.213207 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Dec 04 15:23:00 crc kubenswrapper[4946]: I1204 15:23:00.213789 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Dec 04 15:23:00 crc kubenswrapper[4946]: I1204 15:23:00.997174 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f4481828-f464-47c6-a803-0c1962101efa","Type":"ContainerStarted","Data":"d5f65d3dc4cc9f8a234f9b1d6868d7b9a3e108491b68a9a42129c58b18aeb74d"}
Dec 04 15:23:01 crc kubenswrapper[4946]: I1204 15:23:01.001051 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7","Type":"ContainerStarted","Data":"013892c0b79639eef8bf816cf6796e8de7aeea450212e1a148d45e50e3a196bb"}
Dec 04 15:23:01 crc kubenswrapper[4946]: I1204 15:23:01.438992 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0"
Dec 04 15:23:01 crc kubenswrapper[4946]: E1204 15:23:01.439304 4946 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 04 15:23:01 crc kubenswrapper[4946]: E1204 15:23:01.439369 4946 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 04 15:23:01 crc kubenswrapper[4946]: E1204 15:23:01.439475 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift podName:b7676ab4-212c-4e17-a84a-0979a65936d1 nodeName:}" failed. No retries permitted until 2025-12-04 15:23:09.439444863 +0000 UTC m=+1240.325488514 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift") pod "swift-storage-0" (UID: "b7676ab4-212c-4e17-a84a-0979a65936d1") : configmap "swift-ring-files" not found
Dec 04 15:23:03 crc kubenswrapper[4946]: I1204 15:23:03.258106 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Dec 04 15:23:03 crc kubenswrapper[4946]: I1204 15:23:03.362299 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Dec 04 15:23:05 crc kubenswrapper[4946]: I1204 15:23:05.053901 4946 generic.go:334] "Generic (PLEG): container finished" podID="920eb4d8-3aa1-4141-9f65-647e275405e4" containerID="743ddc87efc06da47d02b8cad5e61ba2b5c71fb18761848f5cb719610cc67896" exitCode=0
Dec 04 15:23:05 crc kubenswrapper[4946]: I1204 15:23:05.053997 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"920eb4d8-3aa1-4141-9f65-647e275405e4","Type":"ContainerDied","Data":"743ddc87efc06da47d02b8cad5e61ba2b5c71fb18761848f5cb719610cc67896"}
Dec 04 15:23:05 crc kubenswrapper[4946]: I1204 15:23:05.059156 4946 generic.go:334] "Generic (PLEG): container finished" podID="f65583d1-046b-463a-9101-2074072a94f0" containerID="4af82c0e7141a08c616b14294ff018b51e2011189f2e35b47b77c81800165efa" exitCode=0
Dec 04 15:23:05 crc kubenswrapper[4946]: I1204 15:23:05.059205 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f65583d1-046b-463a-9101-2074072a94f0","Type":"ContainerDied","Data":"4af82c0e7141a08c616b14294ff018b51e2011189f2e35b47b77c81800165efa"}
Dec 04 15:23:05 crc kubenswrapper[4946]: I1204 15:23:05.879322 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-3c1c-account-create-update-hv5bl"]
Dec 04 15:23:05 crc kubenswrapper[4946]: I1204 15:23:05.881550 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-3c1c-account-create-update-hv5bl"
Dec 04 15:23:05 crc kubenswrapper[4946]: I1204 15:23:05.885194 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Dec 04 15:23:05 crc kubenswrapper[4946]: I1204 15:23:05.895807 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-mwbv8"]
Dec 04 15:23:05 crc kubenswrapper[4946]: I1204 15:23:05.897515 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-mwbv8"
Dec 04 15:23:05 crc kubenswrapper[4946]: I1204 15:23:05.911902 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-3c1c-account-create-update-hv5bl"]
Dec 04 15:23:05 crc kubenswrapper[4946]: I1204 15:23:05.928858 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-mwbv8"]
Dec 04 15:23:05 crc kubenswrapper[4946]: I1204 15:23:05.976854 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11b8d15d-d063-478d-8f4f-82d950f9aa2f-operator-scripts\") pod \"glance-db-create-mwbv8\" (UID: \"11b8d15d-d063-478d-8f4f-82d950f9aa2f\") " pod="openstack/glance-db-create-mwbv8"
Dec 04 15:23:05 crc kubenswrapper[4946]: I1204 15:23:05.977357 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/513bc3b9-8ae8-4e8b-b02d-fb17f48f7921-operator-scripts\") pod \"glance-3c1c-account-create-update-hv5bl\" (UID: \"513bc3b9-8ae8-4e8b-b02d-fb17f48f7921\") " pod="openstack/glance-3c1c-account-create-update-hv5bl"
Dec 04 15:23:05 crc kubenswrapper[4946]: I1204 15:23:05.977435 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fdfb\" (UniqueName: \"kubernetes.io/projected/513bc3b9-8ae8-4e8b-b02d-fb17f48f7921-kube-api-access-9fdfb\") pod \"glance-3c1c-account-create-update-hv5bl\" (UID: \"513bc3b9-8ae8-4e8b-b02d-fb17f48f7921\") " pod="openstack/glance-3c1c-account-create-update-hv5bl"
Dec 04 15:23:05 crc kubenswrapper[4946]: I1204 15:23:05.977479 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2twd\" (UniqueName: \"kubernetes.io/projected/11b8d15d-d063-478d-8f4f-82d950f9aa2f-kube-api-access-b2twd\") pod \"glance-db-create-mwbv8\" (UID: \"11b8d15d-d063-478d-8f4f-82d950f9aa2f\") " pod="openstack/glance-db-create-mwbv8"
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.079368 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"920eb4d8-3aa1-4141-9f65-647e275405e4","Type":"ContainerStarted","Data":"61250f67537c021bfba802c2cf1e24e1c4b4ad54a1378fe5f316a10c39333c42"}
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.079746 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.082476 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11b8d15d-d063-478d-8f4f-82d950f9aa2f-operator-scripts\") pod \"glance-db-create-mwbv8\" (UID: \"11b8d15d-d063-478d-8f4f-82d950f9aa2f\") " pod="openstack/glance-db-create-mwbv8"
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.082678 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/513bc3b9-8ae8-4e8b-b02d-fb17f48f7921-operator-scripts\") pod \"glance-3c1c-account-create-update-hv5bl\" (UID: \"513bc3b9-8ae8-4e8b-b02d-fb17f48f7921\") " pod="openstack/glance-3c1c-account-create-update-hv5bl"
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.082927 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fdfb\" (UniqueName: \"kubernetes.io/projected/513bc3b9-8ae8-4e8b-b02d-fb17f48f7921-kube-api-access-9fdfb\") pod \"glance-3c1c-account-create-update-hv5bl\" (UID: \"513bc3b9-8ae8-4e8b-b02d-fb17f48f7921\") " pod="openstack/glance-3c1c-account-create-update-hv5bl"
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.083073 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2twd\" (UniqueName: \"kubernetes.io/projected/11b8d15d-d063-478d-8f4f-82d950f9aa2f-kube-api-access-b2twd\") pod \"glance-db-create-mwbv8\" (UID: \"11b8d15d-d063-478d-8f4f-82d950f9aa2f\") " pod="openstack/glance-db-create-mwbv8"
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.084496 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/513bc3b9-8ae8-4e8b-b02d-fb17f48f7921-operator-scripts\") pod \"glance-3c1c-account-create-update-hv5bl\" (UID: \"513bc3b9-8ae8-4e8b-b02d-fb17f48f7921\") " pod="openstack/glance-3c1c-account-create-update-hv5bl"
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.084734 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11b8d15d-d063-478d-8f4f-82d950f9aa2f-operator-scripts\") pod \"glance-db-create-mwbv8\" (UID: \"11b8d15d-d063-478d-8f4f-82d950f9aa2f\") " pod="openstack/glance-db-create-mwbv8"
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.088200 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f65583d1-046b-463a-9101-2074072a94f0","Type":"ContainerStarted","Data":"793aaf1a1a7437e7e5a4a080f87f02b82e6a64f68c3b9c9cfcb825f04c561997"}
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.089099 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.090648 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-rnp6k" event={"ID":"f2266dde-4870-46a5-9c4a-c348c6c4d4ed","Type":"ContainerStarted","Data":"a6a3bcae5c0f1ff9ff5e72e6a5caf26107aaee9d3cb91b4ac7f3de46f0b7ab4b"}
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.107389 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7","Type":"ContainerStarted","Data":"3bad8571e38bb42b20087345d24182ed376a1bd2824b874d3a9e8c8e6d17aef8"}
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.107958 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.108859 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fdfb\" (UniqueName: \"kubernetes.io/projected/513bc3b9-8ae8-4e8b-b02d-fb17f48f7921-kube-api-access-9fdfb\") pod \"glance-3c1c-account-create-update-hv5bl\" (UID: \"513bc3b9-8ae8-4e8b-b02d-fb17f48f7921\") " pod="openstack/glance-3c1c-account-create-update-hv5bl"
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.115677 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2twd\" (UniqueName: \"kubernetes.io/projected/11b8d15d-d063-478d-8f4f-82d950f9aa2f-kube-api-access-b2twd\") pod \"glance-db-create-mwbv8\" (UID: \"11b8d15d-d063-478d-8f4f-82d950f9aa2f\") " pod="openstack/glance-db-create-mwbv8"
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.154769 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=50.402603743 podStartE2EDuration="1m11.154712899s" podCreationTimestamp="2025-12-04 15:21:55 +0000 UTC" firstStartedPulling="2025-12-04 15:21:57.578202607 +0000 UTC m=+1168.464246248" lastFinishedPulling="2025-12-04 15:22:18.330311763 +0000 UTC m=+1189.216355404" observedRunningTime="2025-12-04 15:23:06.14818938 +0000 UTC m=+1237.034233021" watchObservedRunningTime="2025-12-04 15:23:06.154712899 +0000 UTC m=+1237.040756540"
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.204942 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=50.451784126 podStartE2EDuration="1m10.204906403s" podCreationTimestamp="2025-12-04 15:21:56 +0000 UTC" firstStartedPulling="2025-12-04 15:21:58.40968345 +0000 UTC m=+1169.295727091" lastFinishedPulling="2025-12-04 15:22:18.162805727 +0000 UTC m=+1189.048849368" observedRunningTime="2025-12-04 15:23:06.195798253 +0000 UTC m=+1237.081841894" watchObservedRunningTime="2025-12-04 15:23:06.204906403 +0000 UTC m=+1237.090950044"
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.230388 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-3c1c-account-create-update-hv5bl"
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.232499 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=13.053571853 podStartE2EDuration="15.232487078s" podCreationTimestamp="2025-12-04 15:22:51 +0000 UTC" firstStartedPulling="2025-12-04 15:22:57.431585565 +0000 UTC m=+1228.317629206" lastFinishedPulling="2025-12-04 15:22:59.6105008 +0000 UTC m=+1230.496544431" observedRunningTime="2025-12-04 15:23:06.224608742 +0000 UTC m=+1237.110652383" watchObservedRunningTime="2025-12-04 15:23:06.232487078 +0000 UTC m=+1237.118530709"
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.256926 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-mwbv8"
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.277194 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-rnp6k" podStartSLOduration=2.806877575 podStartE2EDuration="9.277171071s" podCreationTimestamp="2025-12-04 15:22:57 +0000 UTC" firstStartedPulling="2025-12-04 15:22:58.410865806 +0000 UTC m=+1229.296909447" lastFinishedPulling="2025-12-04 15:23:04.881159302 +0000 UTC m=+1235.767202943" observedRunningTime="2025-12-04 15:23:06.2585017 +0000 UTC m=+1237.144545341" watchObservedRunningTime="2025-12-04 15:23:06.277171071 +0000 UTC m=+1237.163214712"
Dec 04 15:23:06 crc kubenswrapper[4946]: I1204 15:23:06.352461 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="decd9bb2-7749-48ff-b886-74e49bf5222d" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503"
Dec 04 15:23:07 crc kubenswrapper[4946]: I1204 15:23:07.031035 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-3c1c-account-create-update-hv5bl"]
Dec 04 15:23:07 crc kubenswrapper[4946]: I1204 15:23:07.064387 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-mwbv8"]
Dec 04 15:23:07 crc kubenswrapper[4946]: I1204 15:23:07.121671 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-3c1c-account-create-update-hv5bl" event={"ID":"513bc3b9-8ae8-4e8b-b02d-fb17f48f7921","Type":"ContainerStarted","Data":"073a8e0976a5a0cbf96e991298e807147333428ba6d9fb125732be802c01a2e2"}
Dec 04 15:23:07 crc kubenswrapper[4946]: I1204 15:23:07.130456 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-mwbv8" event={"ID":"11b8d15d-d063-478d-8f4f-82d950f9aa2f","Type":"ContainerStarted","Data":"7eeffd9a6e5d0019d02202875cf0bc0e4900bc422fea13f4a8f662ab74f08865"}
Dec 04 15:23:07 crc kubenswrapper[4946]: I1204 15:23:07.532408 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-qv4hw" podUID="2734e466-178a-4344-bfac-9adb5e4492a7" containerName="ovn-controller" probeResult="failure" output=<
Dec 04 15:23:07 crc kubenswrapper[4946]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Dec 04 15:23:07 crc kubenswrapper[4946]: >
Dec 04 15:23:07 crc kubenswrapper[4946]: I1204 15:23:07.599596 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Dec 04 15:23:07 crc kubenswrapper[4946]: I1204 15:23:07.701408 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-q8nbg"
Dec 04 15:23:07 crc kubenswrapper[4946]: I1204 15:23:07.824900 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-nq7lb"]
Dec 04 15:23:07 crc kubenswrapper[4946]: I1204 15:23:07.826103 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" podUID="0181c3d1-7d0e-4959-a9cb-e4e00e01188a" containerName="dnsmasq-dns" containerID="cri-o://012375251ecb14ff1f4a9357bc9da82be53eb5622f52eda1bd8d8d00c9fc5f14" gracePeriod=10
Dec 04 15:23:07 crc kubenswrapper[4946]: I1204 15:23:07.844345 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Dec 04 15:23:08 crc kubenswrapper[4946]: I1204 15:23:08.177329 4946 generic.go:334] "Generic (PLEG): container finished" podID="513bc3b9-8ae8-4e8b-b02d-fb17f48f7921" containerID="f19688e673391624288c47de1f217dd8e9396af4280d16ba125328710436b3fc" exitCode=0
Dec 04 15:23:08 crc kubenswrapper[4946]: I1204 15:23:08.177436 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-3c1c-account-create-update-hv5bl" event={"ID":"513bc3b9-8ae8-4e8b-b02d-fb17f48f7921","Type":"ContainerDied","Data":"f19688e673391624288c47de1f217dd8e9396af4280d16ba125328710436b3fc"}
Dec 04 15:23:08 crc kubenswrapper[4946]: I1204 15:23:08.189176 4946 generic.go:334] "Generic (PLEG): container finished" podID="0181c3d1-7d0e-4959-a9cb-e4e00e01188a" containerID="012375251ecb14ff1f4a9357bc9da82be53eb5622f52eda1bd8d8d00c9fc5f14" exitCode=0
Dec 04 15:23:08 crc kubenswrapper[4946]: I1204 15:23:08.189309 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" event={"ID":"0181c3d1-7d0e-4959-a9cb-e4e00e01188a","Type":"ContainerDied","Data":"012375251ecb14ff1f4a9357bc9da82be53eb5622f52eda1bd8d8d00c9fc5f14"}
Dec 04 15:23:08 crc kubenswrapper[4946]: I1204 15:23:08.219712 4946 generic.go:334] "Generic (PLEG): container finished" podID="11b8d15d-d063-478d-8f4f-82d950f9aa2f" containerID="9d713b888216b499c051e34e954e944be7f05ef6f85ca36968fe31de92d5cd0b" exitCode=0
Dec 04 15:23:08 crc kubenswrapper[4946]: I1204 15:23:08.220150 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-mwbv8" event={"ID":"11b8d15d-d063-478d-8f4f-82d950f9aa2f","Type":"ContainerDied","Data":"9d713b888216b499c051e34e954e944be7f05ef6f85ca36968fe31de92d5cd0b"}
Dec 04 15:23:08 crc kubenswrapper[4946]: E1204 15:23:08.249107 4946 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod513bc3b9_8ae8_4e8b_b02d_fb17f48f7921.slice/crio-f19688e673391624288c47de1f217dd8e9396af4280d16ba125328710436b3fc.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod513bc3b9_8ae8_4e8b_b02d_fb17f48f7921.slice/crio-conmon-f19688e673391624288c47de1f217dd8e9396af4280d16ba125328710436b3fc.scope\": RecentStats: unable to find data in memory cache]"
Dec 04 15:23:09 crc kubenswrapper[4946]: I1204 15:23:09.443090 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0"
Dec 04 15:23:09 crc kubenswrapper[4946]: E1204 15:23:09.443293 4946 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 04 15:23:09 crc kubenswrapper[4946]: E1204 15:23:09.444518 4946 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 04 15:23:09 crc kubenswrapper[4946]: E1204 15:23:09.444593 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift podName:b7676ab4-212c-4e17-a84a-0979a65936d1 nodeName:}" failed. No retries permitted until 2025-12-04 15:23:25.444566339 +0000 UTC m=+1256.330609980 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift") pod "swift-storage-0" (UID: "b7676ab4-212c-4e17-a84a-0979a65936d1") : configmap "swift-ring-files" not found
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.069358 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-7x4qr"]
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.071866 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-7x4qr"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.107546 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-7x4qr"]
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.133551 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-0353-account-create-update-cqjvb"]
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.141028 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-0353-account-create-update-cqjvb"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.150687 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.169436 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-0353-account-create-update-cqjvb"]
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.174725 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3825a065-e047-480e-a2a0-3aa2a1bdba24-operator-scripts\") pod \"keystone-db-create-7x4qr\" (UID: \"3825a065-e047-480e-a2a0-3aa2a1bdba24\") " pod="openstack/keystone-db-create-7x4qr"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.174902 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62kl7\" (UniqueName: \"kubernetes.io/projected/3825a065-e047-480e-a2a0-3aa2a1bdba24-kube-api-access-62kl7\") pod \"keystone-db-create-7x4qr\" (UID: \"3825a065-e047-480e-a2a0-3aa2a1bdba24\") " pod="openstack/keystone-db-create-7x4qr"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.284171 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62kl7\" (UniqueName: \"kubernetes.io/projected/3825a065-e047-480e-a2a0-3aa2a1bdba24-kube-api-access-62kl7\") pod \"keystone-db-create-7x4qr\" (UID: \"3825a065-e047-480e-a2a0-3aa2a1bdba24\") " pod="openstack/keystone-db-create-7x4qr"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.284236 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvqfd\" (UniqueName: \"kubernetes.io/projected/77ca1c4a-5e40-484a-ab87-c6cedf677c47-kube-api-access-bvqfd\") pod \"keystone-0353-account-create-update-cqjvb\" (UID: \"77ca1c4a-5e40-484a-ab87-c6cedf677c47\") " pod="openstack/keystone-0353-account-create-update-cqjvb"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.286125 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77ca1c4a-5e40-484a-ab87-c6cedf677c47-operator-scripts\") pod \"keystone-0353-account-create-update-cqjvb\" (UID: \"77ca1c4a-5e40-484a-ab87-c6cedf677c47\") " pod="openstack/keystone-0353-account-create-update-cqjvb"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.286627 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3825a065-e047-480e-a2a0-3aa2a1bdba24-operator-scripts\") pod \"keystone-db-create-7x4qr\" (UID: \"3825a065-e047-480e-a2a0-3aa2a1bdba24\") " pod="openstack/keystone-db-create-7x4qr"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.287869 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3825a065-e047-480e-a2a0-3aa2a1bdba24-operator-scripts\") pod \"keystone-db-create-7x4qr\" (UID: \"3825a065-e047-480e-a2a0-3aa2a1bdba24\") " pod="openstack/keystone-db-create-7x4qr"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.314344 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62kl7\" (UniqueName: \"kubernetes.io/projected/3825a065-e047-480e-a2a0-3aa2a1bdba24-kube-api-access-62kl7\") pod \"keystone-db-create-7x4qr\" (UID: \"3825a065-e047-480e-a2a0-3aa2a1bdba24\") " pod="openstack/keystone-db-create-7x4qr"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.389365 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-ggqwz"]
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.389836 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvqfd\" (UniqueName: \"kubernetes.io/projected/77ca1c4a-5e40-484a-ab87-c6cedf677c47-kube-api-access-bvqfd\") pod \"keystone-0353-account-create-update-cqjvb\" (UID: \"77ca1c4a-5e40-484a-ab87-c6cedf677c47\") " pod="openstack/keystone-0353-account-create-update-cqjvb"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.390005 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77ca1c4a-5e40-484a-ab87-c6cedf677c47-operator-scripts\") pod \"keystone-0353-account-create-update-cqjvb\" (UID: \"77ca1c4a-5e40-484a-ab87-c6cedf677c47\") " pod="openstack/keystone-0353-account-create-update-cqjvb"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.390843 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77ca1c4a-5e40-484a-ab87-c6cedf677c47-operator-scripts\") pod \"keystone-0353-account-create-update-cqjvb\" (UID: \"77ca1c4a-5e40-484a-ab87-c6cedf677c47\") " pod="openstack/keystone-0353-account-create-update-cqjvb"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.391015 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-ggqwz"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.404404 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-7x4qr"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.405100 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-ggqwz"]
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.418767 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvqfd\" (UniqueName: \"kubernetes.io/projected/77ca1c4a-5e40-484a-ab87-c6cedf677c47-kube-api-access-bvqfd\") pod \"keystone-0353-account-create-update-cqjvb\" (UID: \"77ca1c4a-5e40-484a-ab87-c6cedf677c47\") " pod="openstack/keystone-0353-account-create-update-cqjvb"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.450280 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-0373-account-create-update-dx68q"]
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.452487 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-0373-account-create-update-dx68q"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.454902 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.462448 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-0373-account-create-update-dx68q"]
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.481618 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-0353-account-create-update-cqjvb"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.506906 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14-operator-scripts\") pod \"placement-db-create-ggqwz\" (UID: \"2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14\") " pod="openstack/placement-db-create-ggqwz"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.507287 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/464a3ed7-5fda-401e-973a-17f8f510a312-operator-scripts\") pod \"placement-0373-account-create-update-dx68q\" (UID: \"464a3ed7-5fda-401e-973a-17f8f510a312\") " pod="openstack/placement-0373-account-create-update-dx68q"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.507500 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xc6v\" (UniqueName: \"kubernetes.io/projected/464a3ed7-5fda-401e-973a-17f8f510a312-kube-api-access-2xc6v\") pod \"placement-0373-account-create-update-dx68q\" (UID: \"464a3ed7-5fda-401e-973a-17f8f510a312\") " pod="openstack/placement-0373-account-create-update-dx68q"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.508045 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4w29t\" (UniqueName: \"kubernetes.io/projected/2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14-kube-api-access-4w29t\") pod \"placement-db-create-ggqwz\" (UID: \"2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14\") " pod="openstack/placement-db-create-ggqwz"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.530527 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.537095 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-mwbv8"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.610835 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-dns-svc\") pod \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") "
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.610889 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2twd\" (UniqueName: \"kubernetes.io/projected/11b8d15d-d063-478d-8f4f-82d950f9aa2f-kube-api-access-b2twd\") pod \"11b8d15d-d063-478d-8f4f-82d950f9aa2f\" (UID: \"11b8d15d-d063-478d-8f4f-82d950f9aa2f\") "
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.610973 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-config\") pod \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") "
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.610991 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-ovsdbserver-sb\") pod \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") "
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.611184 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-ovsdbserver-nb\") pod \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") "
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.611230 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lndnk\" (UniqueName: \"kubernetes.io/projected/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-kube-api-access-lndnk\") pod \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\" (UID: \"0181c3d1-7d0e-4959-a9cb-e4e00e01188a\") "
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.611252 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11b8d15d-d063-478d-8f4f-82d950f9aa2f-operator-scripts\") pod \"11b8d15d-d063-478d-8f4f-82d950f9aa2f\" (UID: \"11b8d15d-d063-478d-8f4f-82d950f9aa2f\") "
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.611516 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14-operator-scripts\") pod \"placement-db-create-ggqwz\" (UID: \"2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14\") " pod="openstack/placement-db-create-ggqwz"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.611541 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/464a3ed7-5fda-401e-973a-17f8f510a312-operator-scripts\") pod \"placement-0373-account-create-update-dx68q\" (UID: \"464a3ed7-5fda-401e-973a-17f8f510a312\") " pod="openstack/placement-0373-account-create-update-dx68q"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.611581 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xc6v\" (UniqueName: \"kubernetes.io/projected/464a3ed7-5fda-401e-973a-17f8f510a312-kube-api-access-2xc6v\") pod \"placement-0373-account-create-update-dx68q\" (UID: \"464a3ed7-5fda-401e-973a-17f8f510a312\") " pod="openstack/placement-0373-account-create-update-dx68q"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.611642 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4w29t\" (UniqueName: \"kubernetes.io/projected/2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14-kube-api-access-4w29t\") pod \"placement-db-create-ggqwz\" (UID: \"2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14\") " pod="openstack/placement-db-create-ggqwz"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.613424 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11b8d15d-d063-478d-8f4f-82d950f9aa2f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "11b8d15d-d063-478d-8f4f-82d950f9aa2f" (UID: "11b8d15d-d063-478d-8f4f-82d950f9aa2f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.613785 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/464a3ed7-5fda-401e-973a-17f8f510a312-operator-scripts\") pod \"placement-0373-account-create-update-dx68q\" (UID: \"464a3ed7-5fda-401e-973a-17f8f510a312\") " pod="openstack/placement-0373-account-create-update-dx68q"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.614963 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14-operator-scripts\") pod \"placement-db-create-ggqwz\" (UID: \"2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14\") " pod="openstack/placement-db-create-ggqwz"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.623263 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-kube-api-access-lndnk" (OuterVolumeSpecName: "kube-api-access-lndnk") pod "0181c3d1-7d0e-4959-a9cb-e4e00e01188a" (UID: "0181c3d1-7d0e-4959-a9cb-e4e00e01188a"). InnerVolumeSpecName "kube-api-access-lndnk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.638454 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11b8d15d-d063-478d-8f4f-82d950f9aa2f-kube-api-access-b2twd" (OuterVolumeSpecName: "kube-api-access-b2twd") pod "11b8d15d-d063-478d-8f4f-82d950f9aa2f" (UID: "11b8d15d-d063-478d-8f4f-82d950f9aa2f"). InnerVolumeSpecName "kube-api-access-b2twd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.649868 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-3c1c-account-create-update-hv5bl"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.653774 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xc6v\" (UniqueName: \"kubernetes.io/projected/464a3ed7-5fda-401e-973a-17f8f510a312-kube-api-access-2xc6v\") pod \"placement-0373-account-create-update-dx68q\" (UID: \"464a3ed7-5fda-401e-973a-17f8f510a312\") " pod="openstack/placement-0373-account-create-update-dx68q"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.664302 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4w29t\" (UniqueName: \"kubernetes.io/projected/2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14-kube-api-access-4w29t\") pod \"placement-db-create-ggqwz\" (UID: \"2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14\") " pod="openstack/placement-db-create-ggqwz"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.712520 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9fdfb\" (UniqueName: \"kubernetes.io/projected/513bc3b9-8ae8-4e8b-b02d-fb17f48f7921-kube-api-access-9fdfb\") pod \"513bc3b9-8ae8-4e8b-b02d-fb17f48f7921\" (UID: \"513bc3b9-8ae8-4e8b-b02d-fb17f48f7921\") "
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.713095 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/513bc3b9-8ae8-4e8b-b02d-fb17f48f7921-operator-scripts\") pod \"513bc3b9-8ae8-4e8b-b02d-fb17f48f7921\" (UID: \"513bc3b9-8ae8-4e8b-b02d-fb17f48f7921\") "
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.714531 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/513bc3b9-8ae8-4e8b-b02d-fb17f48f7921-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "513bc3b9-8ae8-4e8b-b02d-fb17f48f7921" (UID: "513bc3b9-8ae8-4e8b-b02d-fb17f48f7921"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.718773 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lndnk\" (UniqueName: \"kubernetes.io/projected/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-kube-api-access-lndnk\") on node \"crc\" DevicePath \"\""
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.718830 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11b8d15d-d063-478d-8f4f-82d950f9aa2f-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.718843 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2twd\" (UniqueName: \"kubernetes.io/projected/11b8d15d-d063-478d-8f4f-82d950f9aa2f-kube-api-access-b2twd\") on node \"crc\" DevicePath \"\""
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.718857 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/513bc3b9-8ae8-4e8b-b02d-fb17f48f7921-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.729670 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/513bc3b9-8ae8-4e8b-b02d-fb17f48f7921-kube-api-access-9fdfb" (OuterVolumeSpecName: "kube-api-access-9fdfb") pod "513bc3b9-8ae8-4e8b-b02d-fb17f48f7921" (UID: "513bc3b9-8ae8-4e8b-b02d-fb17f48f7921"). InnerVolumeSpecName "kube-api-access-9fdfb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.730141 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-ggqwz"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.749894 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0181c3d1-7d0e-4959-a9cb-e4e00e01188a" (UID: "0181c3d1-7d0e-4959-a9cb-e4e00e01188a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.756520 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-config" (OuterVolumeSpecName: "config") pod "0181c3d1-7d0e-4959-a9cb-e4e00e01188a" (UID: "0181c3d1-7d0e-4959-a9cb-e4e00e01188a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.777491 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-0373-account-create-update-dx68q"
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.783428 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0181c3d1-7d0e-4959-a9cb-e4e00e01188a" (UID: "0181c3d1-7d0e-4959-a9cb-e4e00e01188a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.790716 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0181c3d1-7d0e-4959-a9cb-e4e00e01188a" (UID: "0181c3d1-7d0e-4959-a9cb-e4e00e01188a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.820838 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.820891 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9fdfb\" (UniqueName: \"kubernetes.io/projected/513bc3b9-8ae8-4e8b-b02d-fb17f48f7921-kube-api-access-9fdfb\") on node \"crc\" DevicePath \"\""
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.820907 4946 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.820919 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-config\") on node \"crc\" DevicePath \"\""
Dec 04 15:23:10 crc kubenswrapper[4946]: I1204 15:23:10.820931 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0181c3d1-7d0e-4959-a9cb-e4e00e01188a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.107282 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-7x4qr"]
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.279407 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb" event={"ID":"0181c3d1-7d0e-4959-a9cb-e4e00e01188a","Type":"ContainerDied","Data":"59d38d343a594947bd044d3e3461b2eaab7341b97cdd31fbf57347198944291b"}
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.279467 4946 scope.go:117] "RemoveContainer" containerID="012375251ecb14ff1f4a9357bc9da82be53eb5622f52eda1bd8d8d00c9fc5f14"
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.279612 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-nq7lb"
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.292446 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-7x4qr" event={"ID":"3825a065-e047-480e-a2a0-3aa2a1bdba24","Type":"ContainerStarted","Data":"6999f648c9e0a1c988e77aba6a988c800dc158660402e189d9884eb85fcb8439"}
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.318860 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-mwbv8" event={"ID":"11b8d15d-d063-478d-8f4f-82d950f9aa2f","Type":"ContainerDied","Data":"7eeffd9a6e5d0019d02202875cf0bc0e4900bc422fea13f4a8f662ab74f08865"}
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.318930 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7eeffd9a6e5d0019d02202875cf0bc0e4900bc422fea13f4a8f662ab74f08865"
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.319050 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-mwbv8"
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.334401 4946 scope.go:117] "RemoveContainer" containerID="f4842c4c185feab1d9782038b57ef47bad92e0a65116c39e7bdc62f0b656ae31"
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.338403 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-nq7lb"]
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.349361 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-0353-account-create-update-cqjvb"]
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.366125 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-nq7lb"]
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.372808 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f4481828-f464-47c6-a803-0c1962101efa","Type":"ContainerStarted","Data":"3dca52226fe52c8743c96828e977701a03b099747bd82652707389a214d0cecf"}
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.391929 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-3c1c-account-create-update-hv5bl" event={"ID":"513bc3b9-8ae8-4e8b-b02d-fb17f48f7921","Type":"ContainerDied","Data":"073a8e0976a5a0cbf96e991298e807147333428ba6d9fb125732be802c01a2e2"}
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.391994 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="073a8e0976a5a0cbf96e991298e807147333428ba6d9fb125732be802c01a2e2"
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.392081 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-3c1c-account-create-update-hv5bl"
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.404595 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-ggqwz"]
Dec 04 15:23:11 crc kubenswrapper[4946]: W1204 15:23:11.410747 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a3c0985_1155_4e0e_b3ab_d8ff63d8fe14.slice/crio-ced574cc88a0467c53370a264dc9d909f4e735c51f817da5100a50d8bf628a5c WatchSource:0}: Error finding container ced574cc88a0467c53370a264dc9d909f4e735c51f817da5100a50d8bf628a5c: Status 404 returned error can't find the container with id ced574cc88a0467c53370a264dc9d909f4e735c51f817da5100a50d8bf628a5c
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.425713 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-0373-account-create-update-dx68q"]
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.429795 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=17.241950772 podStartE2EDuration="1m9.429775731s" podCreationTimestamp="2025-12-04 15:22:02 +0000 UTC" firstStartedPulling="2025-12-04 15:22:18.248001499 +0000 UTC m=+1189.134045140" lastFinishedPulling="2025-12-04 15:23:10.435826458 +0000 UTC m=+1241.321870099" observedRunningTime="2025-12-04 15:23:11.428848116 +0000 UTC m=+1242.314891757" watchObservedRunningTime="2025-12-04 15:23:11.429775731 +0000 UTC m=+1242.315819372"
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.524431 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0181c3d1-7d0e-4959-a9cb-e4e00e01188a" path="/var/lib/kubelet/pods/0181c3d1-7d0e-4959-a9cb-e4e00e01188a/volumes"
Dec 04 15:23:11 crc kubenswrapper[4946]: I1204 15:23:11.806697 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0"
Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.407732 4946 generic.go:334] "Generic (PLEG): container finished" podID="464a3ed7-5fda-401e-973a-17f8f510a312" containerID="4cf5db641d4bd34f638dec88f3a1345b8500314424bef26f383a7b145e33bdb6" exitCode=0
Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.407824 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-0373-account-create-update-dx68q" event={"ID":"464a3ed7-5fda-401e-973a-17f8f510a312","Type":"ContainerDied","Data":"4cf5db641d4bd34f638dec88f3a1345b8500314424bef26f383a7b145e33bdb6"}
Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.408266 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-0373-account-create-update-dx68q" event={"ID":"464a3ed7-5fda-401e-973a-17f8f510a312","Type":"ContainerStarted","Data":"0770554f1567af052e68aa3a955eea0f5f93c22d8490a0872f9f922fc4714629"}
Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.410482 4946 generic.go:334] "Generic (PLEG): container finished" podID="77ca1c4a-5e40-484a-ab87-c6cedf677c47" containerID="c353be8ba5469f93b16fd811a10bb3b69e2f449da844c5a4ce804eed88af8553" exitCode=0
Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.410576 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-0353-account-create-update-cqjvb" event={"ID":"77ca1c4a-5e40-484a-ab87-c6cedf677c47","Type":"ContainerDied","Data":"c353be8ba5469f93b16fd811a10bb3b69e2f449da844c5a4ce804eed88af8553"}
Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.410618 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-0353-account-create-update-cqjvb" event={"ID":"77ca1c4a-5e40-484a-ab87-c6cedf677c47","Type":"ContainerStarted","Data":"87981ddf75b6f48f2a6eb1e1730d8d23ddca254eb85e698b888155f964c5e2ae"}
Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.412825 4946 generic.go:334] "Generic (PLEG): container finished" podID="3825a065-e047-480e-a2a0-3aa2a1bdba24" containerID="00a7e538c3d0fe4c4dab760135e48f542dbbd468679d72b470d691d9ad41a628" exitCode=0
Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.412989 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-7x4qr" event={"ID":"3825a065-e047-480e-a2a0-3aa2a1bdba24","Type":"ContainerDied","Data":"00a7e538c3d0fe4c4dab760135e48f542dbbd468679d72b470d691d9ad41a628"}
Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.415030 4946 generic.go:334] "Generic (PLEG): container finished" podID="2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14" containerID="544ba986150b8f5a299b10cc7d15631439d7454fbb17bc8815f57ec59a33edfd" exitCode=0
Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.415098 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-ggqwz" event={"ID":"2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14","Type":"ContainerDied","Data":"544ba986150b8f5a299b10cc7d15631439d7454fbb17bc8815f57ec59a33edfd"}
Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.415141 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-ggqwz" event={"ID":"2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14","Type":"ContainerStarted","Data":"ced574cc88a0467c53370a264dc9d909f4e735c51f817da5100a50d8bf628a5c"}
Dec 04 15:23:12 crc kubenswrapper[4946]: I1204
15:23:12.518586 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-qv4hw" podUID="2734e466-178a-4344-bfac-9adb5e4492a7" containerName="ovn-controller" probeResult="failure" output=< Dec 04 15:23:12 crc kubenswrapper[4946]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 04 15:23:12 crc kubenswrapper[4946]: > Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.526498 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-hc6tt" Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.551185 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-hc6tt" Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.847375 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-qv4hw-config-26s6r"] Dec 04 15:23:12 crc kubenswrapper[4946]: E1204 15:23:12.848093 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="513bc3b9-8ae8-4e8b-b02d-fb17f48f7921" containerName="mariadb-account-create-update" Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.850946 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="513bc3b9-8ae8-4e8b-b02d-fb17f48f7921" containerName="mariadb-account-create-update" Dec 04 15:23:12 crc kubenswrapper[4946]: E1204 15:23:12.850998 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0181c3d1-7d0e-4959-a9cb-e4e00e01188a" containerName="init" Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.851007 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="0181c3d1-7d0e-4959-a9cb-e4e00e01188a" containerName="init" Dec 04 15:23:12 crc kubenswrapper[4946]: E1204 15:23:12.851055 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11b8d15d-d063-478d-8f4f-82d950f9aa2f" containerName="mariadb-database-create" Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.851066 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="11b8d15d-d063-478d-8f4f-82d950f9aa2f" containerName="mariadb-database-create" Dec 04 15:23:12 crc kubenswrapper[4946]: E1204 15:23:12.851083 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0181c3d1-7d0e-4959-a9cb-e4e00e01188a" containerName="dnsmasq-dns" Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.851091 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="0181c3d1-7d0e-4959-a9cb-e4e00e01188a" containerName="dnsmasq-dns" Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.851514 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="0181c3d1-7d0e-4959-a9cb-e4e00e01188a" containerName="dnsmasq-dns" Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.851536 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="513bc3b9-8ae8-4e8b-b02d-fb17f48f7921" containerName="mariadb-account-create-update" Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.851558 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="11b8d15d-d063-478d-8f4f-82d950f9aa2f" containerName="mariadb-database-create" Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.852803 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.861488 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 04 15:23:12 crc kubenswrapper[4946]: I1204 15:23:12.873786 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-qv4hw-config-26s6r"] Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.013721 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ab361f7d-2a62-454d-9e66-5adfbfdabac1-var-log-ovn\") pod \"ovn-controller-qv4hw-config-26s6r\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.013802 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ab361f7d-2a62-454d-9e66-5adfbfdabac1-additional-scripts\") pod \"ovn-controller-qv4hw-config-26s6r\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.013873 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ab361f7d-2a62-454d-9e66-5adfbfdabac1-scripts\") pod \"ovn-controller-qv4hw-config-26s6r\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.013925 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ab361f7d-2a62-454d-9e66-5adfbfdabac1-var-run-ovn\") pod \"ovn-controller-qv4hw-config-26s6r\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.013992 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjdhd\" (UniqueName: \"kubernetes.io/projected/ab361f7d-2a62-454d-9e66-5adfbfdabac1-kube-api-access-kjdhd\") pod \"ovn-controller-qv4hw-config-26s6r\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.014030 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ab361f7d-2a62-454d-9e66-5adfbfdabac1-var-run\") pod \"ovn-controller-qv4hw-config-26s6r\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.116807 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ab361f7d-2a62-454d-9e66-5adfbfdabac1-var-run\") pod \"ovn-controller-qv4hw-config-26s6r\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.116957 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ab361f7d-2a62-454d-9e66-5adfbfdabac1-var-log-ovn\") pod 
\"ovn-controller-qv4hw-config-26s6r\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.116999 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ab361f7d-2a62-454d-9e66-5adfbfdabac1-additional-scripts\") pod \"ovn-controller-qv4hw-config-26s6r\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.117075 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ab361f7d-2a62-454d-9e66-5adfbfdabac1-scripts\") pod \"ovn-controller-qv4hw-config-26s6r\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.117230 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ab361f7d-2a62-454d-9e66-5adfbfdabac1-var-run\") pod \"ovn-controller-qv4hw-config-26s6r\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.118082 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ab361f7d-2a62-454d-9e66-5adfbfdabac1-additional-scripts\") pod \"ovn-controller-qv4hw-config-26s6r\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.118164 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ab361f7d-2a62-454d-9e66-5adfbfdabac1-var-run-ovn\") pod \"ovn-controller-qv4hw-config-26s6r\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.119375 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ab361f7d-2a62-454d-9e66-5adfbfdabac1-scripts\") pod \"ovn-controller-qv4hw-config-26s6r\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.119572 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ab361f7d-2a62-454d-9e66-5adfbfdabac1-var-log-ovn\") pod \"ovn-controller-qv4hw-config-26s6r\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.119635 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ab361f7d-2a62-454d-9e66-5adfbfdabac1-var-run-ovn\") pod \"ovn-controller-qv4hw-config-26s6r\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.119864 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjdhd\" (UniqueName: \"kubernetes.io/projected/ab361f7d-2a62-454d-9e66-5adfbfdabac1-kube-api-access-kjdhd\") pod 
\"ovn-controller-qv4hw-config-26s6r\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.165913 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjdhd\" (UniqueName: \"kubernetes.io/projected/ab361f7d-2a62-454d-9e66-5adfbfdabac1-kube-api-access-kjdhd\") pod \"ovn-controller-qv4hw-config-26s6r\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.182092 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.691462 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-qv4hw-config-26s6r"] Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.815250 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:13 crc kubenswrapper[4946]: I1204 15:23:13.898168 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-ggqwz" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.065775 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14-operator-scripts\") pod \"2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14\" (UID: \"2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14\") " Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.066178 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4w29t\" (UniqueName: \"kubernetes.io/projected/2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14-kube-api-access-4w29t\") pod \"2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14\" (UID: \"2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14\") " Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.066859 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14" (UID: "2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.068268 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.075311 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14-kube-api-access-4w29t" (OuterVolumeSpecName: "kube-api-access-4w29t") pod "2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14" (UID: "2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14"). InnerVolumeSpecName "kube-api-access-4w29t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.170780 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4w29t\" (UniqueName: \"kubernetes.io/projected/2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14-kube-api-access-4w29t\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.186050 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-0373-account-create-update-dx68q" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.192281 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-7x4qr" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.202281 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-0353-account-create-update-cqjvb" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.274010 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/464a3ed7-5fda-401e-973a-17f8f510a312-operator-scripts\") pod \"464a3ed7-5fda-401e-973a-17f8f510a312\" (UID: \"464a3ed7-5fda-401e-973a-17f8f510a312\") " Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.274085 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3825a065-e047-480e-a2a0-3aa2a1bdba24-operator-scripts\") pod \"3825a065-e047-480e-a2a0-3aa2a1bdba24\" (UID: \"3825a065-e047-480e-a2a0-3aa2a1bdba24\") " Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.274370 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xc6v\" (UniqueName: \"kubernetes.io/projected/464a3ed7-5fda-401e-973a-17f8f510a312-kube-api-access-2xc6v\") pod \"464a3ed7-5fda-401e-973a-17f8f510a312\" (UID: \"464a3ed7-5fda-401e-973a-17f8f510a312\") " Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.274501 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62kl7\" (UniqueName: \"kubernetes.io/projected/3825a065-e047-480e-a2a0-3aa2a1bdba24-kube-api-access-62kl7\") pod \"3825a065-e047-480e-a2a0-3aa2a1bdba24\" (UID: \"3825a065-e047-480e-a2a0-3aa2a1bdba24\") " Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.275636 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3825a065-e047-480e-a2a0-3aa2a1bdba24-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3825a065-e047-480e-a2a0-3aa2a1bdba24" (UID: "3825a065-e047-480e-a2a0-3aa2a1bdba24"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.279204 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/464a3ed7-5fda-401e-973a-17f8f510a312-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "464a3ed7-5fda-401e-973a-17f8f510a312" (UID: "464a3ed7-5fda-401e-973a-17f8f510a312"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.280012 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/464a3ed7-5fda-401e-973a-17f8f510a312-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.280049 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3825a065-e047-480e-a2a0-3aa2a1bdba24-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.283831 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3825a065-e047-480e-a2a0-3aa2a1bdba24-kube-api-access-62kl7" (OuterVolumeSpecName: "kube-api-access-62kl7") pod "3825a065-e047-480e-a2a0-3aa2a1bdba24" (UID: "3825a065-e047-480e-a2a0-3aa2a1bdba24"). InnerVolumeSpecName "kube-api-access-62kl7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.284799 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/464a3ed7-5fda-401e-973a-17f8f510a312-kube-api-access-2xc6v" (OuterVolumeSpecName: "kube-api-access-2xc6v") pod "464a3ed7-5fda-401e-973a-17f8f510a312" (UID: "464a3ed7-5fda-401e-973a-17f8f510a312"). InnerVolumeSpecName "kube-api-access-2xc6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.381017 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvqfd\" (UniqueName: \"kubernetes.io/projected/77ca1c4a-5e40-484a-ab87-c6cedf677c47-kube-api-access-bvqfd\") pod \"77ca1c4a-5e40-484a-ab87-c6cedf677c47\" (UID: \"77ca1c4a-5e40-484a-ab87-c6cedf677c47\") " Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.381196 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77ca1c4a-5e40-484a-ab87-c6cedf677c47-operator-scripts\") pod \"77ca1c4a-5e40-484a-ab87-c6cedf677c47\" (UID: \"77ca1c4a-5e40-484a-ab87-c6cedf677c47\") " Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.381604 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62kl7\" (UniqueName: \"kubernetes.io/projected/3825a065-e047-480e-a2a0-3aa2a1bdba24-kube-api-access-62kl7\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.381625 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xc6v\" (UniqueName: \"kubernetes.io/projected/464a3ed7-5fda-401e-973a-17f8f510a312-kube-api-access-2xc6v\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.381804 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77ca1c4a-5e40-484a-ab87-c6cedf677c47-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "77ca1c4a-5e40-484a-ab87-c6cedf677c47" (UID: "77ca1c4a-5e40-484a-ab87-c6cedf677c47"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.385460 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77ca1c4a-5e40-484a-ab87-c6cedf677c47-kube-api-access-bvqfd" (OuterVolumeSpecName: "kube-api-access-bvqfd") pod "77ca1c4a-5e40-484a-ab87-c6cedf677c47" (UID: "77ca1c4a-5e40-484a-ab87-c6cedf677c47"). InnerVolumeSpecName "kube-api-access-bvqfd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.440201 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-0373-account-create-update-dx68q" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.443272 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-0373-account-create-update-dx68q" event={"ID":"464a3ed7-5fda-401e-973a-17f8f510a312","Type":"ContainerDied","Data":"0770554f1567af052e68aa3a955eea0f5f93c22d8490a0872f9f922fc4714629"} Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.443352 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0770554f1567af052e68aa3a955eea0f5f93c22d8490a0872f9f922fc4714629" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.445717 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-0353-account-create-update-cqjvb" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.445695 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-0353-account-create-update-cqjvb" event={"ID":"77ca1c4a-5e40-484a-ab87-c6cedf677c47","Type":"ContainerDied","Data":"87981ddf75b6f48f2a6eb1e1730d8d23ddca254eb85e698b888155f964c5e2ae"} Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.445985 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87981ddf75b6f48f2a6eb1e1730d8d23ddca254eb85e698b888155f964c5e2ae" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.446967 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-7x4qr" event={"ID":"3825a065-e047-480e-a2a0-3aa2a1bdba24","Type":"ContainerDied","Data":"6999f648c9e0a1c988e77aba6a988c800dc158660402e189d9884eb85fcb8439"} Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.447071 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6999f648c9e0a1c988e77aba6a988c800dc158660402e189d9884eb85fcb8439" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.447259 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-7x4qr" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.450971 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-ggqwz" event={"ID":"2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14","Type":"ContainerDied","Data":"ced574cc88a0467c53370a264dc9d909f4e735c51f817da5100a50d8bf628a5c"} Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.451015 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ced574cc88a0467c53370a264dc9d909f4e735c51f817da5100a50d8bf628a5c" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.450987 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-ggqwz" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.453285 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qv4hw-config-26s6r" event={"ID":"ab361f7d-2a62-454d-9e66-5adfbfdabac1","Type":"ContainerStarted","Data":"0989b2d5faf284dfd2ca0a025fb876dc5ed596110fd952874fcc8ba8bb3cbca0"} Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.453363 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qv4hw-config-26s6r" event={"ID":"ab361f7d-2a62-454d-9e66-5adfbfdabac1","Type":"ContainerStarted","Data":"ba585211ae0f138d329b211c0ee42e8ad91ead4b608024eac5be17cff6cc2dbd"} Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.483565 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77ca1c4a-5e40-484a-ab87-c6cedf677c47-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.484201 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvqfd\" (UniqueName: \"kubernetes.io/projected/77ca1c4a-5e40-484a-ab87-c6cedf677c47-kube-api-access-bvqfd\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:14 crc kubenswrapper[4946]: I1204 15:23:14.952904 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-qv4hw-config-26s6r" podStartSLOduration=2.952885948 podStartE2EDuration="2.952885948s" podCreationTimestamp="2025-12-04 15:23:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:23:14.480103693 +0000 UTC m=+1245.366147334" watchObservedRunningTime="2025-12-04 15:23:14.952885948 +0000 UTC m=+1245.838929589" Dec 04 15:23:15 crc kubenswrapper[4946]: I1204 15:23:15.471620 4946 generic.go:334] "Generic (PLEG): container finished" podID="ab361f7d-2a62-454d-9e66-5adfbfdabac1" containerID="0989b2d5faf284dfd2ca0a025fb876dc5ed596110fd952874fcc8ba8bb3cbca0" exitCode=0 Dec 04 15:23:15 crc kubenswrapper[4946]: I1204 15:23:15.471765 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qv4hw-config-26s6r" event={"ID":"ab361f7d-2a62-454d-9e66-5adfbfdabac1","Type":"ContainerDied","Data":"0989b2d5faf284dfd2ca0a025fb876dc5ed596110fd952874fcc8ba8bb3cbca0"} Dec 04 15:23:15 crc kubenswrapper[4946]: I1204 15:23:15.959210 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-tb5mp"] Dec 04 15:23:15 crc kubenswrapper[4946]: E1204 15:23:15.960280 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77ca1c4a-5e40-484a-ab87-c6cedf677c47" containerName="mariadb-account-create-update" Dec 04 15:23:15 crc kubenswrapper[4946]: I1204 15:23:15.960298 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="77ca1c4a-5e40-484a-ab87-c6cedf677c47" containerName="mariadb-account-create-update" Dec 04 15:23:15 crc kubenswrapper[4946]: E1204 15:23:15.960312 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14" containerName="mariadb-database-create" Dec 04 15:23:15 crc kubenswrapper[4946]: I1204 15:23:15.960318 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14" containerName="mariadb-database-create" Dec 04 15:23:15 crc kubenswrapper[4946]: E1204 15:23:15.960348 4946 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="3825a065-e047-480e-a2a0-3aa2a1bdba24" containerName="mariadb-database-create" Dec 04 15:23:15 crc kubenswrapper[4946]: I1204 15:23:15.960354 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3825a065-e047-480e-a2a0-3aa2a1bdba24" containerName="mariadb-database-create" Dec 04 15:23:15 crc kubenswrapper[4946]: E1204 15:23:15.960366 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="464a3ed7-5fda-401e-973a-17f8f510a312" containerName="mariadb-account-create-update" Dec 04 15:23:15 crc kubenswrapper[4946]: I1204 15:23:15.960372 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="464a3ed7-5fda-401e-973a-17f8f510a312" containerName="mariadb-account-create-update" Dec 04 15:23:15 crc kubenswrapper[4946]: I1204 15:23:15.960595 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14" containerName="mariadb-database-create" Dec 04 15:23:15 crc kubenswrapper[4946]: I1204 15:23:15.960610 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="77ca1c4a-5e40-484a-ab87-c6cedf677c47" containerName="mariadb-account-create-update" Dec 04 15:23:15 crc kubenswrapper[4946]: I1204 15:23:15.960622 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="3825a065-e047-480e-a2a0-3aa2a1bdba24" containerName="mariadb-database-create" Dec 04 15:23:15 crc kubenswrapper[4946]: I1204 15:23:15.960634 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="464a3ed7-5fda-401e-973a-17f8f510a312" containerName="mariadb-account-create-update" Dec 04 15:23:15 crc kubenswrapper[4946]: I1204 15:23:15.961534 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-tb5mp" Dec 04 15:23:15 crc kubenswrapper[4946]: I1204 15:23:15.964279 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 04 15:23:15 crc kubenswrapper[4946]: I1204 15:23:15.966379 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-w4thl" Dec 04 15:23:15 crc kubenswrapper[4946]: I1204 15:23:15.977992 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-tb5mp"] Dec 04 15:23:16 crc kubenswrapper[4946]: I1204 15:23:16.122547 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a4e74d1-f18d-4356-be6d-10171056d511-combined-ca-bundle\") pod \"glance-db-sync-tb5mp\" (UID: \"2a4e74d1-f18d-4356-be6d-10171056d511\") " pod="openstack/glance-db-sync-tb5mp" Dec 04 15:23:16 crc kubenswrapper[4946]: I1204 15:23:16.122798 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2l9z9\" (UniqueName: \"kubernetes.io/projected/2a4e74d1-f18d-4356-be6d-10171056d511-kube-api-access-2l9z9\") pod \"glance-db-sync-tb5mp\" (UID: \"2a4e74d1-f18d-4356-be6d-10171056d511\") " pod="openstack/glance-db-sync-tb5mp" Dec 04 15:23:16 crc kubenswrapper[4946]: I1204 15:23:16.122879 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a4e74d1-f18d-4356-be6d-10171056d511-config-data\") pod \"glance-db-sync-tb5mp\" (UID: \"2a4e74d1-f18d-4356-be6d-10171056d511\") " pod="openstack/glance-db-sync-tb5mp" Dec 04 15:23:16 crc kubenswrapper[4946]: I1204 15:23:16.122953 4946 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2a4e74d1-f18d-4356-be6d-10171056d511-db-sync-config-data\") pod \"glance-db-sync-tb5mp\" (UID: \"2a4e74d1-f18d-4356-be6d-10171056d511\") " pod="openstack/glance-db-sync-tb5mp" Dec 04 15:23:16 crc kubenswrapper[4946]: I1204 15:23:16.225731 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2a4e74d1-f18d-4356-be6d-10171056d511-db-sync-config-data\") pod \"glance-db-sync-tb5mp\" (UID: \"2a4e74d1-f18d-4356-be6d-10171056d511\") " pod="openstack/glance-db-sync-tb5mp" Dec 04 15:23:16 crc kubenswrapper[4946]: I1204 15:23:16.226240 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a4e74d1-f18d-4356-be6d-10171056d511-combined-ca-bundle\") pod \"glance-db-sync-tb5mp\" (UID: \"2a4e74d1-f18d-4356-be6d-10171056d511\") " pod="openstack/glance-db-sync-tb5mp" Dec 04 15:23:16 crc kubenswrapper[4946]: I1204 15:23:16.226513 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2l9z9\" (UniqueName: \"kubernetes.io/projected/2a4e74d1-f18d-4356-be6d-10171056d511-kube-api-access-2l9z9\") pod \"glance-db-sync-tb5mp\" (UID: \"2a4e74d1-f18d-4356-be6d-10171056d511\") " pod="openstack/glance-db-sync-tb5mp" Dec 04 15:23:16 crc kubenswrapper[4946]: I1204 15:23:16.226630 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a4e74d1-f18d-4356-be6d-10171056d511-config-data\") pod \"glance-db-sync-tb5mp\" (UID: \"2a4e74d1-f18d-4356-be6d-10171056d511\") " pod="openstack/glance-db-sync-tb5mp" Dec 04 15:23:16 crc kubenswrapper[4946]: I1204 15:23:16.235032 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a4e74d1-f18d-4356-be6d-10171056d511-combined-ca-bundle\") pod \"glance-db-sync-tb5mp\" (UID: \"2a4e74d1-f18d-4356-be6d-10171056d511\") " pod="openstack/glance-db-sync-tb5mp" Dec 04 15:23:16 crc kubenswrapper[4946]: I1204 15:23:16.235805 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2a4e74d1-f18d-4356-be6d-10171056d511-db-sync-config-data\") pod \"glance-db-sync-tb5mp\" (UID: \"2a4e74d1-f18d-4356-be6d-10171056d511\") " pod="openstack/glance-db-sync-tb5mp" Dec 04 15:23:16 crc kubenswrapper[4946]: I1204 15:23:16.238650 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a4e74d1-f18d-4356-be6d-10171056d511-config-data\") pod \"glance-db-sync-tb5mp\" (UID: \"2a4e74d1-f18d-4356-be6d-10171056d511\") " pod="openstack/glance-db-sync-tb5mp" Dec 04 15:23:16 crc kubenswrapper[4946]: I1204 15:23:16.261722 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2l9z9\" (UniqueName: \"kubernetes.io/projected/2a4e74d1-f18d-4356-be6d-10171056d511-kube-api-access-2l9z9\") pod \"glance-db-sync-tb5mp\" (UID: \"2a4e74d1-f18d-4356-be6d-10171056d511\") " pod="openstack/glance-db-sync-tb5mp" Dec 04 15:23:16 crc kubenswrapper[4946]: I1204 15:23:16.278704 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-tb5mp" Dec 04 15:23:16 crc kubenswrapper[4946]: I1204 15:23:16.331852 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="decd9bb2-7749-48ff-b886-74e49bf5222d" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 04 15:23:16 crc kubenswrapper[4946]: I1204 15:23:16.993372 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="920eb4d8-3aa1-4141-9f65-647e275405e4" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.108:5671: connect: connection refused" Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.068258 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.150511 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ab361f7d-2a62-454d-9e66-5adfbfdabac1-var-run\") pod \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.150561 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ab361f7d-2a62-454d-9e66-5adfbfdabac1-scripts\") pod \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.150663 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ab361f7d-2a62-454d-9e66-5adfbfdabac1-var-run-ovn\") pod \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.150713 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ab361f7d-2a62-454d-9e66-5adfbfdabac1-additional-scripts\") pod \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.150791 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ab361f7d-2a62-454d-9e66-5adfbfdabac1-var-log-ovn\") pod \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.150843 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab361f7d-2a62-454d-9e66-5adfbfdabac1-var-run" (OuterVolumeSpecName: "var-run") pod "ab361f7d-2a62-454d-9e66-5adfbfdabac1" (UID: "ab361f7d-2a62-454d-9e66-5adfbfdabac1"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.150924 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab361f7d-2a62-454d-9e66-5adfbfdabac1-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "ab361f7d-2a62-454d-9e66-5adfbfdabac1" (UID: "ab361f7d-2a62-454d-9e66-5adfbfdabac1"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.150927 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab361f7d-2a62-454d-9e66-5adfbfdabac1-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "ab361f7d-2a62-454d-9e66-5adfbfdabac1" (UID: "ab361f7d-2a62-454d-9e66-5adfbfdabac1"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.152136 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab361f7d-2a62-454d-9e66-5adfbfdabac1-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "ab361f7d-2a62-454d-9e66-5adfbfdabac1" (UID: "ab361f7d-2a62-454d-9e66-5adfbfdabac1"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.152524 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab361f7d-2a62-454d-9e66-5adfbfdabac1-scripts" (OuterVolumeSpecName: "scripts") pod "ab361f7d-2a62-454d-9e66-5adfbfdabac1" (UID: "ab361f7d-2a62-454d-9e66-5adfbfdabac1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.152602 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjdhd\" (UniqueName: \"kubernetes.io/projected/ab361f7d-2a62-454d-9e66-5adfbfdabac1-kube-api-access-kjdhd\") pod \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\" (UID: \"ab361f7d-2a62-454d-9e66-5adfbfdabac1\") " Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.153269 4946 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ab361f7d-2a62-454d-9e66-5adfbfdabac1-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.153484 4946 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ab361f7d-2a62-454d-9e66-5adfbfdabac1-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.153498 4946 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ab361f7d-2a62-454d-9e66-5adfbfdabac1-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.153513 4946 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ab361f7d-2a62-454d-9e66-5adfbfdabac1-var-run\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.153524 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ab361f7d-2a62-454d-9e66-5adfbfdabac1-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.158136 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-tb5mp"] Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.160629 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab361f7d-2a62-454d-9e66-5adfbfdabac1-kube-api-access-kjdhd" (OuterVolumeSpecName: "kube-api-access-kjdhd") pod "ab361f7d-2a62-454d-9e66-5adfbfdabac1" (UID: 
"ab361f7d-2a62-454d-9e66-5adfbfdabac1"). InnerVolumeSpecName "kube-api-access-kjdhd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:17 crc kubenswrapper[4946]: W1204 15:23:17.177499 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a4e74d1_f18d_4356_be6d_10171056d511.slice/crio-8386bca653e69858b62b0586af7f81ab948998fea9477cdecb0ba14f2e8070fe WatchSource:0}: Error finding container 8386bca653e69858b62b0586af7f81ab948998fea9477cdecb0ba14f2e8070fe: Status 404 returned error can't find the container with id 8386bca653e69858b62b0586af7f81ab948998fea9477cdecb0ba14f2e8070fe Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.255887 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjdhd\" (UniqueName: \"kubernetes.io/projected/ab361f7d-2a62-454d-9e66-5adfbfdabac1-kube-api-access-kjdhd\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.495552 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-qv4hw" Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.553550 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-tb5mp" event={"ID":"2a4e74d1-f18d-4356-be6d-10171056d511","Type":"ContainerStarted","Data":"8386bca653e69858b62b0586af7f81ab948998fea9477cdecb0ba14f2e8070fe"} Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.559049 4946 generic.go:334] "Generic (PLEG): container finished" podID="f2266dde-4870-46a5-9c4a-c348c6c4d4ed" containerID="a6a3bcae5c0f1ff9ff5e72e6a5caf26107aaee9d3cb91b4ac7f3de46f0b7ab4b" exitCode=0 Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.559286 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-rnp6k" event={"ID":"f2266dde-4870-46a5-9c4a-c348c6c4d4ed","Type":"ContainerDied","Data":"a6a3bcae5c0f1ff9ff5e72e6a5caf26107aaee9d3cb91b4ac7f3de46f0b7ab4b"} Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.564745 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qv4hw-config-26s6r" event={"ID":"ab361f7d-2a62-454d-9e66-5adfbfdabac1","Type":"ContainerDied","Data":"ba585211ae0f138d329b211c0ee42e8ad91ead4b608024eac5be17cff6cc2dbd"} Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.564797 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba585211ae0f138d329b211c0ee42e8ad91ead4b608024eac5be17cff6cc2dbd" Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.564892 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-qv4hw-config-26s6r" Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.640491 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-qv4hw-config-26s6r"] Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.656737 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-qv4hw-config-26s6r"] Dec 04 15:23:17 crc kubenswrapper[4946]: I1204 15:23:17.746393 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:23:18 crc kubenswrapper[4946]: I1204 15:23:18.816776 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:18 crc kubenswrapper[4946]: I1204 15:23:18.838755 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.083954 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-rnp6k" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.210191 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-swiftconf\") pod \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.210719 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-combined-ca-bundle\") pod \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.210802 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-etc-swift\") pod \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.210832 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-scripts\") pod \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.210870 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-ring-data-devices\") pod \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.210934 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-dispersionconf\") pod \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\" (UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.211028 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjdk9\" (UniqueName: \"kubernetes.io/projected/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-kube-api-access-pjdk9\") pod \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\" 
(UID: \"f2266dde-4870-46a5-9c4a-c348c6c4d4ed\") " Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.211853 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "f2266dde-4870-46a5-9c4a-c348c6c4d4ed" (UID: "f2266dde-4870-46a5-9c4a-c348c6c4d4ed"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.212722 4946 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.212077 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "f2266dde-4870-46a5-9c4a-c348c6c4d4ed" (UID: "f2266dde-4870-46a5-9c4a-c348c6c4d4ed"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.222369 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-kube-api-access-pjdk9" (OuterVolumeSpecName: "kube-api-access-pjdk9") pod "f2266dde-4870-46a5-9c4a-c348c6c4d4ed" (UID: "f2266dde-4870-46a5-9c4a-c348c6c4d4ed"). InnerVolumeSpecName "kube-api-access-pjdk9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.228085 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "f2266dde-4870-46a5-9c4a-c348c6c4d4ed" (UID: "f2266dde-4870-46a5-9c4a-c348c6c4d4ed"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.240808 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-scripts" (OuterVolumeSpecName: "scripts") pod "f2266dde-4870-46a5-9c4a-c348c6c4d4ed" (UID: "f2266dde-4870-46a5-9c4a-c348c6c4d4ed"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.252278 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "f2266dde-4870-46a5-9c4a-c348c6c4d4ed" (UID: "f2266dde-4870-46a5-9c4a-c348c6c4d4ed"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.254787 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f2266dde-4870-46a5-9c4a-c348c6c4d4ed" (UID: "f2266dde-4870-46a5-9c4a-c348c6c4d4ed"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.315770 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.315810 4946 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.315830 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.315841 4946 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.315855 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjdk9\" (UniqueName: \"kubernetes.io/projected/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-kube-api-access-pjdk9\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.315866 4946 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f2266dde-4870-46a5-9c4a-c348c6c4d4ed-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.471650 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab361f7d-2a62-454d-9e66-5adfbfdabac1" path="/var/lib/kubelet/pods/ab361f7d-2a62-454d-9e66-5adfbfdabac1/volumes" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.593720 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-rnp6k" event={"ID":"f2266dde-4870-46a5-9c4a-c348c6c4d4ed","Type":"ContainerDied","Data":"14d2c52052252d5a87359a60335c104e589cb8029a0313c9b763b638dac660ef"} Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.594161 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14d2c52052252d5a87359a60335c104e589cb8029a0313c9b763b638dac660ef" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.593830 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-rnp6k" Dec 04 15:23:19 crc kubenswrapper[4946]: I1204 15:23:19.596241 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:22 crc kubenswrapper[4946]: I1204 15:23:22.727224 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 04 15:23:22 crc kubenswrapper[4946]: I1204 15:23:22.728019 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="f4481828-f464-47c6-a803-0c1962101efa" containerName="prometheus" containerID="cri-o://0193c8b0a40b2db19afee087737226c05fe497fed21a90bc904a851c48f76fc1" gracePeriod=600 Dec 04 15:23:22 crc kubenswrapper[4946]: I1204 15:23:22.728474 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="f4481828-f464-47c6-a803-0c1962101efa" containerName="thanos-sidecar" containerID="cri-o://3dca52226fe52c8743c96828e977701a03b099747bd82652707389a214d0cecf" gracePeriod=600 Dec 04 15:23:22 crc kubenswrapper[4946]: I1204 15:23:22.728671 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="f4481828-f464-47c6-a803-0c1962101efa" containerName="config-reloader" containerID="cri-o://d5f65d3dc4cc9f8a234f9b1d6868d7b9a3e108491b68a9a42129c58b18aeb74d" gracePeriod=600 Dec 04 15:23:23 crc kubenswrapper[4946]: I1204 15:23:23.644603 4946 generic.go:334] "Generic (PLEG): container finished" podID="f4481828-f464-47c6-a803-0c1962101efa" containerID="3dca52226fe52c8743c96828e977701a03b099747bd82652707389a214d0cecf" exitCode=0 Dec 04 15:23:23 crc kubenswrapper[4946]: I1204 15:23:23.645162 4946 generic.go:334] "Generic (PLEG): container finished" podID="f4481828-f464-47c6-a803-0c1962101efa" containerID="d5f65d3dc4cc9f8a234f9b1d6868d7b9a3e108491b68a9a42129c58b18aeb74d" exitCode=0 Dec 04 15:23:23 crc kubenswrapper[4946]: I1204 15:23:23.645175 4946 generic.go:334] "Generic (PLEG): container finished" podID="f4481828-f464-47c6-a803-0c1962101efa" containerID="0193c8b0a40b2db19afee087737226c05fe497fed21a90bc904a851c48f76fc1" exitCode=0 Dec 04 15:23:23 crc kubenswrapper[4946]: I1204 15:23:23.644666 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f4481828-f464-47c6-a803-0c1962101efa","Type":"ContainerDied","Data":"3dca52226fe52c8743c96828e977701a03b099747bd82652707389a214d0cecf"} Dec 04 15:23:23 crc kubenswrapper[4946]: I1204 15:23:23.645243 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f4481828-f464-47c6-a803-0c1962101efa","Type":"ContainerDied","Data":"d5f65d3dc4cc9f8a234f9b1d6868d7b9a3e108491b68a9a42129c58b18aeb74d"} Dec 04 15:23:23 crc kubenswrapper[4946]: I1204 15:23:23.645262 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f4481828-f464-47c6-a803-0c1962101efa","Type":"ContainerDied","Data":"0193c8b0a40b2db19afee087737226c05fe497fed21a90bc904a851c48f76fc1"} Dec 04 15:23:23 crc kubenswrapper[4946]: I1204 15:23:23.816148 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" podUID="f4481828-f464-47c6-a803-0c1962101efa" containerName="prometheus" probeResult="failure" output="Get \"http://10.217.0.115:9090/-/ready\": dial tcp 10.217.0.115:9090: connect: connection 
refused" Dec 04 15:23:25 crc kubenswrapper[4946]: I1204 15:23:25.476360 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:23:25 crc kubenswrapper[4946]: I1204 15:23:25.486390 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b7676ab4-212c-4e17-a84a-0979a65936d1-etc-swift\") pod \"swift-storage-0\" (UID: \"b7676ab4-212c-4e17-a84a-0979a65936d1\") " pod="openstack/swift-storage-0" Dec 04 15:23:25 crc kubenswrapper[4946]: I1204 15:23:25.743623 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 04 15:23:26 crc kubenswrapper[4946]: I1204 15:23:26.332020 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="decd9bb2-7749-48ff-b886-74e49bf5222d" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 04 15:23:26 crc kubenswrapper[4946]: I1204 15:23:26.990323 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.520552 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-sf24r"] Dec 04 15:23:27 crc kubenswrapper[4946]: E1204 15:23:27.520897 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2266dde-4870-46a5-9c4a-c348c6c4d4ed" containerName="swift-ring-rebalance" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.520912 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2266dde-4870-46a5-9c4a-c348c6c4d4ed" containerName="swift-ring-rebalance" Dec 04 15:23:27 crc kubenswrapper[4946]: E1204 15:23:27.520950 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab361f7d-2a62-454d-9e66-5adfbfdabac1" containerName="ovn-config" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.520958 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab361f7d-2a62-454d-9e66-5adfbfdabac1" containerName="ovn-config" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.521143 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2266dde-4870-46a5-9c4a-c348c6c4d4ed" containerName="swift-ring-rebalance" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.521155 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab361f7d-2a62-454d-9e66-5adfbfdabac1" containerName="ovn-config" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.521796 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-sf24r" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.555094 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-sf24r"] Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.584595 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-c8d8-account-create-update-kpzbx"] Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.594296 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-c8d8-account-create-update-kpzbx" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.604455 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.636311 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-c8d8-account-create-update-kpzbx"] Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.637551 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q69sn\" (UniqueName: \"kubernetes.io/projected/77f8c12b-a81e-4b7a-b153-17f4320daeb5-kube-api-access-q69sn\") pod \"cinder-db-create-sf24r\" (UID: \"77f8c12b-a81e-4b7a-b153-17f4320daeb5\") " pod="openstack/cinder-db-create-sf24r" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.637630 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77f8c12b-a81e-4b7a-b153-17f4320daeb5-operator-scripts\") pod \"cinder-db-create-sf24r\" (UID: \"77f8c12b-a81e-4b7a-b153-17f4320daeb5\") " pod="openstack/cinder-db-create-sf24r" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.637718 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dbc4429-4eb9-4a7d-bd88-062fab6e1237-operator-scripts\") pod \"cinder-c8d8-account-create-update-kpzbx\" (UID: \"9dbc4429-4eb9-4a7d-bd88-062fab6e1237\") " pod="openstack/cinder-c8d8-account-create-update-kpzbx" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.637769 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hr7nq\" (UniqueName: \"kubernetes.io/projected/9dbc4429-4eb9-4a7d-bd88-062fab6e1237-kube-api-access-hr7nq\") pod \"cinder-c8d8-account-create-update-kpzbx\" (UID: \"9dbc4429-4eb9-4a7d-bd88-062fab6e1237\") " pod="openstack/cinder-c8d8-account-create-update-kpzbx" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.754039 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77f8c12b-a81e-4b7a-b153-17f4320daeb5-operator-scripts\") pod \"cinder-db-create-sf24r\" (UID: \"77f8c12b-a81e-4b7a-b153-17f4320daeb5\") " pod="openstack/cinder-db-create-sf24r" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.754681 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dbc4429-4eb9-4a7d-bd88-062fab6e1237-operator-scripts\") pod \"cinder-c8d8-account-create-update-kpzbx\" (UID: \"9dbc4429-4eb9-4a7d-bd88-062fab6e1237\") " pod="openstack/cinder-c8d8-account-create-update-kpzbx" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.754798 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hr7nq\" (UniqueName: \"kubernetes.io/projected/9dbc4429-4eb9-4a7d-bd88-062fab6e1237-kube-api-access-hr7nq\") pod \"cinder-c8d8-account-create-update-kpzbx\" (UID: \"9dbc4429-4eb9-4a7d-bd88-062fab6e1237\") " pod="openstack/cinder-c8d8-account-create-update-kpzbx" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.755064 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q69sn\" (UniqueName: 
\"kubernetes.io/projected/77f8c12b-a81e-4b7a-b153-17f4320daeb5-kube-api-access-q69sn\") pod \"cinder-db-create-sf24r\" (UID: \"77f8c12b-a81e-4b7a-b153-17f4320daeb5\") " pod="openstack/cinder-db-create-sf24r" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.760910 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dbc4429-4eb9-4a7d-bd88-062fab6e1237-operator-scripts\") pod \"cinder-c8d8-account-create-update-kpzbx\" (UID: \"9dbc4429-4eb9-4a7d-bd88-062fab6e1237\") " pod="openstack/cinder-c8d8-account-create-update-kpzbx" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.770858 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77f8c12b-a81e-4b7a-b153-17f4320daeb5-operator-scripts\") pod \"cinder-db-create-sf24r\" (UID: \"77f8c12b-a81e-4b7a-b153-17f4320daeb5\") " pod="openstack/cinder-db-create-sf24r" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.815941 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-4w6dl"] Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.835959 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-4w6dl" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.853997 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q69sn\" (UniqueName: \"kubernetes.io/projected/77f8c12b-a81e-4b7a-b153-17f4320daeb5-kube-api-access-q69sn\") pod \"cinder-db-create-sf24r\" (UID: \"77f8c12b-a81e-4b7a-b153-17f4320daeb5\") " pod="openstack/cinder-db-create-sf24r" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.854272 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hr7nq\" (UniqueName: \"kubernetes.io/projected/9dbc4429-4eb9-4a7d-bd88-062fab6e1237-kube-api-access-hr7nq\") pod \"cinder-c8d8-account-create-update-kpzbx\" (UID: \"9dbc4429-4eb9-4a7d-bd88-062fab6e1237\") " pod="openstack/cinder-c8d8-account-create-update-kpzbx" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.866867 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-sf24r" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.876524 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-4w6dl"] Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.938357 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-c8d8-account-create-update-kpzbx" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.963366 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c809ac2-b396-40ae-ac9c-0eb18befdf08-operator-scripts\") pod \"barbican-db-create-4w6dl\" (UID: \"5c809ac2-b396-40ae-ac9c-0eb18befdf08\") " pod="openstack/barbican-db-create-4w6dl" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.963536 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6mb4\" (UniqueName: \"kubernetes.io/projected/5c809ac2-b396-40ae-ac9c-0eb18befdf08-kube-api-access-l6mb4\") pod \"barbican-db-create-4w6dl\" (UID: \"5c809ac2-b396-40ae-ac9c-0eb18befdf08\") " pod="openstack/barbican-db-create-4w6dl" Dec 04 15:23:27 crc kubenswrapper[4946]: I1204 15:23:27.998915 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-gzkww"] Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.000232 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-gzkww" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.006626 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.007269 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-d8v6l" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.007354 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.007508 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.031609 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-gzkww"] Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.061075 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-c72b-account-create-update-tc59c"] Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.062364 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-c72b-account-create-update-tc59c" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.068684 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.074655 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6mb4\" (UniqueName: \"kubernetes.io/projected/5c809ac2-b396-40ae-ac9c-0eb18befdf08-kube-api-access-l6mb4\") pod \"barbican-db-create-4w6dl\" (UID: \"5c809ac2-b396-40ae-ac9c-0eb18befdf08\") " pod="openstack/barbican-db-create-4w6dl" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.075275 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c809ac2-b396-40ae-ac9c-0eb18befdf08-operator-scripts\") pod \"barbican-db-create-4w6dl\" (UID: \"5c809ac2-b396-40ae-ac9c-0eb18befdf08\") " pod="openstack/barbican-db-create-4w6dl" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.075527 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndb75\" (UniqueName: \"kubernetes.io/projected/4f646177-69a6-42cf-9d49-8be8541c58c1-kube-api-access-ndb75\") pod \"keystone-db-sync-gzkww\" (UID: \"4f646177-69a6-42cf-9d49-8be8541c58c1\") " pod="openstack/keystone-db-sync-gzkww" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.075694 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f646177-69a6-42cf-9d49-8be8541c58c1-combined-ca-bundle\") pod \"keystone-db-sync-gzkww\" (UID: \"4f646177-69a6-42cf-9d49-8be8541c58c1\") " pod="openstack/keystone-db-sync-gzkww" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.075851 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f646177-69a6-42cf-9d49-8be8541c58c1-config-data\") pod \"keystone-db-sync-gzkww\" (UID: \"4f646177-69a6-42cf-9d49-8be8541c58c1\") " pod="openstack/keystone-db-sync-gzkww" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.076270 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c809ac2-b396-40ae-ac9c-0eb18befdf08-operator-scripts\") pod \"barbican-db-create-4w6dl\" (UID: \"5c809ac2-b396-40ae-ac9c-0eb18befdf08\") " pod="openstack/barbican-db-create-4w6dl" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.090446 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-c72b-account-create-update-tc59c"] Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.127930 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6mb4\" (UniqueName: \"kubernetes.io/projected/5c809ac2-b396-40ae-ac9c-0eb18befdf08-kube-api-access-l6mb4\") pod \"barbican-db-create-4w6dl\" (UID: \"5c809ac2-b396-40ae-ac9c-0eb18befdf08\") " pod="openstack/barbican-db-create-4w6dl" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.135543 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-create-wpggf"] Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.136962 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-create-wpggf" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.176178 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-create-wpggf"] Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.177359 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wld97\" (UniqueName: \"kubernetes.io/projected/98ae66d2-d939-4351-bf32-f649f37df068-kube-api-access-wld97\") pod \"cloudkitty-db-create-wpggf\" (UID: \"98ae66d2-d939-4351-bf32-f649f37df068\") " pod="openstack/cloudkitty-db-create-wpggf" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.177460 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndb75\" (UniqueName: \"kubernetes.io/projected/4f646177-69a6-42cf-9d49-8be8541c58c1-kube-api-access-ndb75\") pod \"keystone-db-sync-gzkww\" (UID: \"4f646177-69a6-42cf-9d49-8be8541c58c1\") " pod="openstack/keystone-db-sync-gzkww" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.177484 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cd72c\" (UniqueName: \"kubernetes.io/projected/96a446b9-7b24-42fa-b6bb-99a22e323530-kube-api-access-cd72c\") pod \"barbican-c72b-account-create-update-tc59c\" (UID: \"96a446b9-7b24-42fa-b6bb-99a22e323530\") " pod="openstack/barbican-c72b-account-create-update-tc59c" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.177527 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f646177-69a6-42cf-9d49-8be8541c58c1-combined-ca-bundle\") pod \"keystone-db-sync-gzkww\" (UID: \"4f646177-69a6-42cf-9d49-8be8541c58c1\") " pod="openstack/keystone-db-sync-gzkww" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.177563 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98ae66d2-d939-4351-bf32-f649f37df068-operator-scripts\") pod \"cloudkitty-db-create-wpggf\" (UID: \"98ae66d2-d939-4351-bf32-f649f37df068\") " pod="openstack/cloudkitty-db-create-wpggf" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.177601 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f646177-69a6-42cf-9d49-8be8541c58c1-config-data\") pod \"keystone-db-sync-gzkww\" (UID: \"4f646177-69a6-42cf-9d49-8be8541c58c1\") " pod="openstack/keystone-db-sync-gzkww" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.177641 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96a446b9-7b24-42fa-b6bb-99a22e323530-operator-scripts\") pod \"barbican-c72b-account-create-update-tc59c\" (UID: \"96a446b9-7b24-42fa-b6bb-99a22e323530\") " pod="openstack/barbican-c72b-account-create-update-tc59c" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.181728 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f646177-69a6-42cf-9d49-8be8541c58c1-combined-ca-bundle\") pod \"keystone-db-sync-gzkww\" (UID: \"4f646177-69a6-42cf-9d49-8be8541c58c1\") " pod="openstack/keystone-db-sync-gzkww" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.199960 4946 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f646177-69a6-42cf-9d49-8be8541c58c1-config-data\") pod \"keystone-db-sync-gzkww\" (UID: \"4f646177-69a6-42cf-9d49-8be8541c58c1\") " pod="openstack/keystone-db-sync-gzkww" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.233105 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-4w6dl" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.242302 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-qv6jf"] Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.243265 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndb75\" (UniqueName: \"kubernetes.io/projected/4f646177-69a6-42cf-9d49-8be8541c58c1-kube-api-access-ndb75\") pod \"keystone-db-sync-gzkww\" (UID: \"4f646177-69a6-42cf-9d49-8be8541c58c1\") " pod="openstack/keystone-db-sync-gzkww" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.243609 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qv6jf" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.258386 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-623f-account-create-update-hlk8q"] Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.261024 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-623f-account-create-update-hlk8q" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.266427 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-db-secret" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.269934 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-qv6jf"] Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.287733 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98ae66d2-d939-4351-bf32-f649f37df068-operator-scripts\") pod \"cloudkitty-db-create-wpggf\" (UID: \"98ae66d2-d939-4351-bf32-f649f37df068\") " pod="openstack/cloudkitty-db-create-wpggf" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.287879 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96a446b9-7b24-42fa-b6bb-99a22e323530-operator-scripts\") pod \"barbican-c72b-account-create-update-tc59c\" (UID: \"96a446b9-7b24-42fa-b6bb-99a22e323530\") " pod="openstack/barbican-c72b-account-create-update-tc59c" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.287992 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wld97\" (UniqueName: \"kubernetes.io/projected/98ae66d2-d939-4351-bf32-f649f37df068-kube-api-access-wld97\") pod \"cloudkitty-db-create-wpggf\" (UID: \"98ae66d2-d939-4351-bf32-f649f37df068\") " pod="openstack/cloudkitty-db-create-wpggf" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.288173 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cd72c\" (UniqueName: \"kubernetes.io/projected/96a446b9-7b24-42fa-b6bb-99a22e323530-kube-api-access-cd72c\") pod \"barbican-c72b-account-create-update-tc59c\" (UID: \"96a446b9-7b24-42fa-b6bb-99a22e323530\") " pod="openstack/barbican-c72b-account-create-update-tc59c" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 
15:23:28.288932 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98ae66d2-d939-4351-bf32-f649f37df068-operator-scripts\") pod \"cloudkitty-db-create-wpggf\" (UID: \"98ae66d2-d939-4351-bf32-f649f37df068\") " pod="openstack/cloudkitty-db-create-wpggf" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.289001 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96a446b9-7b24-42fa-b6bb-99a22e323530-operator-scripts\") pod \"barbican-c72b-account-create-update-tc59c\" (UID: \"96a446b9-7b24-42fa-b6bb-99a22e323530\") " pod="openstack/barbican-c72b-account-create-update-tc59c" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.308833 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-623f-account-create-update-hlk8q"] Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.320983 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cd72c\" (UniqueName: \"kubernetes.io/projected/96a446b9-7b24-42fa-b6bb-99a22e323530-kube-api-access-cd72c\") pod \"barbican-c72b-account-create-update-tc59c\" (UID: \"96a446b9-7b24-42fa-b6bb-99a22e323530\") " pod="openstack/barbican-c72b-account-create-update-tc59c" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.332445 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wld97\" (UniqueName: \"kubernetes.io/projected/98ae66d2-d939-4351-bf32-f649f37df068-kube-api-access-wld97\") pod \"cloudkitty-db-create-wpggf\" (UID: \"98ae66d2-d939-4351-bf32-f649f37df068\") " pod="openstack/cloudkitty-db-create-wpggf" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.342608 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-gzkww" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.389228 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-c72b-account-create-update-tc59c" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.390418 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e935b12d-7145-47e5-b691-57bd2c9f1fac-operator-scripts\") pod \"neutron-db-create-qv6jf\" (UID: \"e935b12d-7145-47e5-b691-57bd2c9f1fac\") " pod="openstack/neutron-db-create-qv6jf" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.390501 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99713bb8-2c08-402d-ba82-45e2e64ef670-operator-scripts\") pod \"cloudkitty-623f-account-create-update-hlk8q\" (UID: \"99713bb8-2c08-402d-ba82-45e2e64ef670\") " pod="openstack/cloudkitty-623f-account-create-update-hlk8q" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.390600 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqr9r\" (UniqueName: \"kubernetes.io/projected/99713bb8-2c08-402d-ba82-45e2e64ef670-kube-api-access-xqr9r\") pod \"cloudkitty-623f-account-create-update-hlk8q\" (UID: \"99713bb8-2c08-402d-ba82-45e2e64ef670\") " pod="openstack/cloudkitty-623f-account-create-update-hlk8q" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.390628 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crnvm\" (UniqueName: \"kubernetes.io/projected/e935b12d-7145-47e5-b691-57bd2c9f1fac-kube-api-access-crnvm\") pod \"neutron-db-create-qv6jf\" (UID: \"e935b12d-7145-47e5-b691-57bd2c9f1fac\") " pod="openstack/neutron-db-create-qv6jf" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.417537 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-2e71-account-create-update-9w6zw"] Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.418882 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-2e71-account-create-update-9w6zw" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.422833 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.432247 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-2e71-account-create-update-9w6zw"] Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.492479 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e935b12d-7145-47e5-b691-57bd2c9f1fac-operator-scripts\") pod \"neutron-db-create-qv6jf\" (UID: \"e935b12d-7145-47e5-b691-57bd2c9f1fac\") " pod="openstack/neutron-db-create-qv6jf" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.492572 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jl6dc\" (UniqueName: \"kubernetes.io/projected/35e0e7f6-eb97-4c70-8970-c9686b1579b7-kube-api-access-jl6dc\") pod \"neutron-2e71-account-create-update-9w6zw\" (UID: \"35e0e7f6-eb97-4c70-8970-c9686b1579b7\") " pod="openstack/neutron-2e71-account-create-update-9w6zw" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.492926 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35e0e7f6-eb97-4c70-8970-c9686b1579b7-operator-scripts\") pod \"neutron-2e71-account-create-update-9w6zw\" (UID: \"35e0e7f6-eb97-4c70-8970-c9686b1579b7\") " pod="openstack/neutron-2e71-account-create-update-9w6zw" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.493097 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99713bb8-2c08-402d-ba82-45e2e64ef670-operator-scripts\") pod \"cloudkitty-623f-account-create-update-hlk8q\" (UID: \"99713bb8-2c08-402d-ba82-45e2e64ef670\") " pod="openstack/cloudkitty-623f-account-create-update-hlk8q" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.493338 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e935b12d-7145-47e5-b691-57bd2c9f1fac-operator-scripts\") pod \"neutron-db-create-qv6jf\" (UID: \"e935b12d-7145-47e5-b691-57bd2c9f1fac\") " pod="openstack/neutron-db-create-qv6jf" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.493555 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqr9r\" (UniqueName: \"kubernetes.io/projected/99713bb8-2c08-402d-ba82-45e2e64ef670-kube-api-access-xqr9r\") pod \"cloudkitty-623f-account-create-update-hlk8q\" (UID: \"99713bb8-2c08-402d-ba82-45e2e64ef670\") " pod="openstack/cloudkitty-623f-account-create-update-hlk8q" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.493595 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crnvm\" (UniqueName: \"kubernetes.io/projected/e935b12d-7145-47e5-b691-57bd2c9f1fac-kube-api-access-crnvm\") pod \"neutron-db-create-qv6jf\" (UID: \"e935b12d-7145-47e5-b691-57bd2c9f1fac\") " pod="openstack/neutron-db-create-qv6jf" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.493987 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/99713bb8-2c08-402d-ba82-45e2e64ef670-operator-scripts\") pod \"cloudkitty-623f-account-create-update-hlk8q\" (UID: \"99713bb8-2c08-402d-ba82-45e2e64ef670\") " pod="openstack/cloudkitty-623f-account-create-update-hlk8q" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.514233 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crnvm\" (UniqueName: \"kubernetes.io/projected/e935b12d-7145-47e5-b691-57bd2c9f1fac-kube-api-access-crnvm\") pod \"neutron-db-create-qv6jf\" (UID: \"e935b12d-7145-47e5-b691-57bd2c9f1fac\") " pod="openstack/neutron-db-create-qv6jf" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.519303 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqr9r\" (UniqueName: \"kubernetes.io/projected/99713bb8-2c08-402d-ba82-45e2e64ef670-kube-api-access-xqr9r\") pod \"cloudkitty-623f-account-create-update-hlk8q\" (UID: \"99713bb8-2c08-402d-ba82-45e2e64ef670\") " pod="openstack/cloudkitty-623f-account-create-update-hlk8q" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.594966 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jl6dc\" (UniqueName: \"kubernetes.io/projected/35e0e7f6-eb97-4c70-8970-c9686b1579b7-kube-api-access-jl6dc\") pod \"neutron-2e71-account-create-update-9w6zw\" (UID: \"35e0e7f6-eb97-4c70-8970-c9686b1579b7\") " pod="openstack/neutron-2e71-account-create-update-9w6zw" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.595058 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35e0e7f6-eb97-4c70-8970-c9686b1579b7-operator-scripts\") pod \"neutron-2e71-account-create-update-9w6zw\" (UID: \"35e0e7f6-eb97-4c70-8970-c9686b1579b7\") " pod="openstack/neutron-2e71-account-create-update-9w6zw" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.596614 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35e0e7f6-eb97-4c70-8970-c9686b1579b7-operator-scripts\") pod \"neutron-2e71-account-create-update-9w6zw\" (UID: \"35e0e7f6-eb97-4c70-8970-c9686b1579b7\") " pod="openstack/neutron-2e71-account-create-update-9w6zw" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.616749 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jl6dc\" (UniqueName: \"kubernetes.io/projected/35e0e7f6-eb97-4c70-8970-c9686b1579b7-kube-api-access-jl6dc\") pod \"neutron-2e71-account-create-update-9w6zw\" (UID: \"35e0e7f6-eb97-4c70-8970-c9686b1579b7\") " pod="openstack/neutron-2e71-account-create-update-9w6zw" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.623498 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-wpggf" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.642692 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qv6jf" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.651852 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-623f-account-create-update-hlk8q" Dec 04 15:23:28 crc kubenswrapper[4946]: I1204 15:23:28.739303 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-2e71-account-create-update-9w6zw" Dec 04 15:23:31 crc kubenswrapper[4946]: I1204 15:23:31.816921 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" podUID="f4481828-f464-47c6-a803-0c1962101efa" containerName="prometheus" probeResult="failure" output="Get \"http://10.217.0.115:9090/-/ready\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 04 15:23:35 crc kubenswrapper[4946]: E1204 15:23:35.330828 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Dec 04 15:23:35 crc kubenswrapper[4946]: E1204 15:23:35.331842 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2l9z9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-tb5mp_openstack(2a4e74d1-f18d-4356-be6d-10171056d511): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 15:23:35 crc kubenswrapper[4946]: E1204 15:23:35.333444 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-tb5mp" podUID="2a4e74d1-f18d-4356-be6d-10171056d511" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.394761 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.491746 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f4481828-f464-47c6-a803-0c1962101efa-thanos-prometheus-http-client-file\") pod \"f4481828-f464-47c6-a803-0c1962101efa\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.491867 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f4481828-f464-47c6-a803-0c1962101efa-prometheus-metric-storage-rulefiles-0\") pod \"f4481828-f464-47c6-a803-0c1962101efa\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.491914 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f4481828-f464-47c6-a803-0c1962101efa-config\") pod \"f4481828-f464-47c6-a803-0c1962101efa\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.492033 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f4481828-f464-47c6-a803-0c1962101efa-web-config\") pod \"f4481828-f464-47c6-a803-0c1962101efa\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.492077 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f4481828-f464-47c6-a803-0c1962101efa-tls-assets\") pod \"f4481828-f464-47c6-a803-0c1962101efa\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.492108 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f4481828-f464-47c6-a803-0c1962101efa-config-out\") pod \"f4481828-f464-47c6-a803-0c1962101efa\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.492212 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lcmg\" (UniqueName: \"kubernetes.io/projected/f4481828-f464-47c6-a803-0c1962101efa-kube-api-access-5lcmg\") pod \"f4481828-f464-47c6-a803-0c1962101efa\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.492426 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\") pod \"f4481828-f464-47c6-a803-0c1962101efa\" (UID: \"f4481828-f464-47c6-a803-0c1962101efa\") " Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.496941 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4481828-f464-47c6-a803-0c1962101efa-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "f4481828-f464-47c6-a803-0c1962101efa" (UID: "f4481828-f464-47c6-a803-0c1962101efa"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.504452 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4481828-f464-47c6-a803-0c1962101efa-kube-api-access-5lcmg" (OuterVolumeSpecName: "kube-api-access-5lcmg") pod "f4481828-f464-47c6-a803-0c1962101efa" (UID: "f4481828-f464-47c6-a803-0c1962101efa"). InnerVolumeSpecName "kube-api-access-5lcmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.509909 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4481828-f464-47c6-a803-0c1962101efa-config" (OuterVolumeSpecName: "config") pod "f4481828-f464-47c6-a803-0c1962101efa" (UID: "f4481828-f464-47c6-a803-0c1962101efa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.511632 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4481828-f464-47c6-a803-0c1962101efa-config-out" (OuterVolumeSpecName: "config-out") pod "f4481828-f464-47c6-a803-0c1962101efa" (UID: "f4481828-f464-47c6-a803-0c1962101efa"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.512796 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4481828-f464-47c6-a803-0c1962101efa-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "f4481828-f464-47c6-a803-0c1962101efa" (UID: "f4481828-f464-47c6-a803-0c1962101efa"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.517656 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4481828-f464-47c6-a803-0c1962101efa-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "f4481828-f464-47c6-a803-0c1962101efa" (UID: "f4481828-f464-47c6-a803-0c1962101efa"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.536241 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1e082383-0efb-4236-a1c0-966370f5f4d4" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "f4481828-f464-47c6-a803-0c1962101efa" (UID: "f4481828-f464-47c6-a803-0c1962101efa"). InnerVolumeSpecName "pvc-1e082383-0efb-4236-a1c0-966370f5f4d4". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.595458 4946 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f4481828-f464-47c6-a803-0c1962101efa-tls-assets\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.596072 4946 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f4481828-f464-47c6-a803-0c1962101efa-config-out\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.596170 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lcmg\" (UniqueName: \"kubernetes.io/projected/f4481828-f464-47c6-a803-0c1962101efa-kube-api-access-5lcmg\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.596260 4946 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\") on node \"crc\" " Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.596326 4946 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f4481828-f464-47c6-a803-0c1962101efa-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.596399 4946 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f4481828-f464-47c6-a803-0c1962101efa-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.596478 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f4481828-f464-47c6-a803-0c1962101efa-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.607095 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4481828-f464-47c6-a803-0c1962101efa-web-config" (OuterVolumeSpecName: "web-config") pod "f4481828-f464-47c6-a803-0c1962101efa" (UID: "f4481828-f464-47c6-a803-0c1962101efa"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.644760 4946 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.645187 4946 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-1e082383-0efb-4236-a1c0-966370f5f4d4" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1e082383-0efb-4236-a1c0-966370f5f4d4") on node "crc" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.698426 4946 reconciler_common.go:293] "Volume detached for volume \"pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.698808 4946 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f4481828-f464-47c6-a803-0c1962101efa-web-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.877951 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f4481828-f464-47c6-a803-0c1962101efa","Type":"ContainerDied","Data":"9ec3f7c4eb651702a7d79bd330dea930543197ac1b7fa44a3917e6e050c70b00"} Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.877982 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.881662 4946 scope.go:117] "RemoveContainer" containerID="3dca52226fe52c8743c96828e977701a03b099747bd82652707389a214d0cecf" Dec 04 15:23:35 crc kubenswrapper[4946]: E1204 15:23:35.888816 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-tb5mp" podUID="2a4e74d1-f18d-4356-be6d-10171056d511" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.946607 4946 scope.go:117] "RemoveContainer" containerID="d5f65d3dc4cc9f8a234f9b1d6868d7b9a3e108491b68a9a42129c58b18aeb74d" Dec 04 15:23:35 crc kubenswrapper[4946]: I1204 15:23:35.979764 4946 scope.go:117] "RemoveContainer" containerID="0193c8b0a40b2db19afee087737226c05fe497fed21a90bc904a851c48f76fc1" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.025729 4946 scope.go:117] "RemoveContainer" containerID="df1e7483f0fcfd97b3e3a7241aaae24f5fa48bebe91016ebba2b34b22f65ef80" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.077482 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.110865 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.150411 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 04 15:23:36 crc kubenswrapper[4946]: E1204 15:23:36.151196 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4481828-f464-47c6-a803-0c1962101efa" containerName="config-reloader" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.151218 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4481828-f464-47c6-a803-0c1962101efa" containerName="config-reloader" Dec 04 15:23:36 crc kubenswrapper[4946]: E1204 15:23:36.151244 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4481828-f464-47c6-a803-0c1962101efa" 
containerName="thanos-sidecar" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.151253 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4481828-f464-47c6-a803-0c1962101efa" containerName="thanos-sidecar" Dec 04 15:23:36 crc kubenswrapper[4946]: E1204 15:23:36.151287 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4481828-f464-47c6-a803-0c1962101efa" containerName="init-config-reloader" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.151300 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4481828-f464-47c6-a803-0c1962101efa" containerName="init-config-reloader" Dec 04 15:23:36 crc kubenswrapper[4946]: E1204 15:23:36.151315 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4481828-f464-47c6-a803-0c1962101efa" containerName="prometheus" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.151324 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4481828-f464-47c6-a803-0c1962101efa" containerName="prometheus" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.151585 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4481828-f464-47c6-a803-0c1962101efa" containerName="config-reloader" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.151619 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4481828-f464-47c6-a803-0c1962101efa" containerName="prometheus" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.151632 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4481828-f464-47c6-a803-0c1962101efa" containerName="thanos-sidecar" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.155392 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.158538 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.158607 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.158688 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.158755 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.159749 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.161868 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-rvtnm" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.170747 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.176034 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.328089 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-c72b-account-create-update-tc59c"] Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.329288 4946 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.329478 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.329553 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.329577 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.329822 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.329893 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6992\" (UniqueName: \"kubernetes.io/projected/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-kube-api-access-q6992\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.329958 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.330015 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.330976 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.331039 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-config\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.331069 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.331319 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.342689 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.448491 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.448621 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6992\" (UniqueName: \"kubernetes.io/projected/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-kube-api-access-q6992\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.448678 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.448725 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.448786 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.448812 4946 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-config\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.448840 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.448888 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.448946 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.448988 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.449032 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.451975 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.466069 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.470311 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.472992 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.474290 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-config\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.476272 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.477616 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/68982753889d34acaa444cdcec2be2c562e2d871e8fc82af199886676f0b8e03/globalmount\"" pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.477736 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.478065 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.481884 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.482062 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6992\" (UniqueName: \"kubernetes.io/projected/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-kube-api-access-q6992\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.495461 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/ce1f1c5e-70ed-463e-88d7-a0a960dd328d-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.560138 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-gzkww"] Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.612208 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-c8d8-account-create-update-kpzbx"] Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.630490 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1e082383-0efb-4236-a1c0-966370f5f4d4\") pod \"prometheus-metric-storage-0\" (UID: \"ce1f1c5e-70ed-463e-88d7-a0a960dd328d\") " pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.630914 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.796034 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-create-wpggf"] Dec 04 15:23:36 crc kubenswrapper[4946]: W1204 15:23:36.808033 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c809ac2_b396_40ae_ac9c_0eb18befdf08.slice/crio-4142b8868ab9d1cf9a5b24399b93c9ddfef46d5d3467a92ed8869280d23a551b WatchSource:0}: Error finding container 4142b8868ab9d1cf9a5b24399b93c9ddfef46d5d3467a92ed8869280d23a551b: Status 404 returned error can't find the container with id 4142b8868ab9d1cf9a5b24399b93c9ddfef46d5d3467a92ed8869280d23a551b Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.823549 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" podUID="f4481828-f464-47c6-a803-0c1962101efa" containerName="prometheus" probeResult="failure" output="Get \"http://10.217.0.115:9090/-/ready\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.825448 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-4w6dl"] Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.825890 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.842231 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-2e71-account-create-update-9w6zw"] Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.875582 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.886901 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.914507 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-sf24r"] Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.917532 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-4w6dl" event={"ID":"5c809ac2-b396-40ae-ac9c-0eb18befdf08","Type":"ContainerStarted","Data":"4142b8868ab9d1cf9a5b24399b93c9ddfef46d5d3467a92ed8869280d23a551b"} Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.920692 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gzkww" event={"ID":"4f646177-69a6-42cf-9d49-8be8541c58c1","Type":"ContainerStarted","Data":"dc0b3ef82e9a6012bea4a8efdfb0a8566645239aad26c175a083ff268334d3ed"} Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.922583 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-wpggf" event={"ID":"98ae66d2-d939-4351-bf32-f649f37df068","Type":"ContainerStarted","Data":"0e153514140ed408e4f19e2fa767e6c842a6b1f37f2d5bbd3a939339e48d5640"} Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.925795 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-sf24r" event={"ID":"77f8c12b-a81e-4b7a-b153-17f4320daeb5","Type":"ContainerStarted","Data":"d2ce79523d116f60c78b56972832c1098ca0258587a4390d9b2fdfd5a4e8bef7"} Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.927766 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-2e71-account-create-update-9w6zw" event={"ID":"35e0e7f6-eb97-4c70-8970-c9686b1579b7","Type":"ContainerStarted","Data":"2e91a795738cb8befcb3ef333c122f630819394ff5afdd7e462c319a804de662"} Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.971482 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-c72b-account-create-update-tc59c" event={"ID":"96a446b9-7b24-42fa-b6bb-99a22e323530","Type":"ContainerStarted","Data":"d0ad6217a4ac5111cc56d68909269f05cd5dcca0b234c56d2904891c04b09d39"} Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.971558 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-c72b-account-create-update-tc59c" event={"ID":"96a446b9-7b24-42fa-b6bb-99a22e323530","Type":"ContainerStarted","Data":"7df25fab8d3469769e8147b8c66d1589972e4f5e158b593f146bb7ea0dbee146"} Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.979158 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c8d8-account-create-update-kpzbx" event={"ID":"9dbc4429-4eb9-4a7d-bd88-062fab6e1237","Type":"ContainerStarted","Data":"3124cb27f05cf471281baf0b088ac5759fa62d47d5d5c6352f3b191f4ec491a4"} Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.979256 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c8d8-account-create-update-kpzbx" 
event={"ID":"9dbc4429-4eb9-4a7d-bd88-062fab6e1237","Type":"ContainerStarted","Data":"7a96e5ab905c8b129f9d755b35a84a1d4eb032a2c8c33985c0770c46b8143b31"} Dec 04 15:23:36 crc kubenswrapper[4946]: I1204 15:23:36.981258 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7676ab4-212c-4e17-a84a-0979a65936d1","Type":"ContainerStarted","Data":"5619c36a85eeeb4f12e42561176159177afa2507b05a6031acb0d10ecbda40e1"} Dec 04 15:23:37 crc kubenswrapper[4946]: I1204 15:23:37.007873 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-c72b-account-create-update-tc59c" podStartSLOduration=10.007848463 podStartE2EDuration="10.007848463s" podCreationTimestamp="2025-12-04 15:23:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:23:37.004715078 +0000 UTC m=+1267.890758719" watchObservedRunningTime="2025-12-04 15:23:37.007848463 +0000 UTC m=+1267.893892104" Dec 04 15:23:37 crc kubenswrapper[4946]: I1204 15:23:37.028833 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-623f-account-create-update-hlk8q"] Dec 04 15:23:37 crc kubenswrapper[4946]: I1204 15:23:37.036733 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-qv6jf"] Dec 04 15:23:37 crc kubenswrapper[4946]: I1204 15:23:37.040006 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-c8d8-account-create-update-kpzbx" podStartSLOduration=10.039970483 podStartE2EDuration="10.039970483s" podCreationTimestamp="2025-12-04 15:23:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:23:37.028796037 +0000 UTC m=+1267.914839668" watchObservedRunningTime="2025-12-04 15:23:37.039970483 +0000 UTC m=+1267.926014124" Dec 04 15:23:37 crc kubenswrapper[4946]: I1204 15:23:37.103468 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-db-secret" Dec 04 15:23:37 crc kubenswrapper[4946]: I1204 15:23:37.519967 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4481828-f464-47c6-a803-0c1962101efa" path="/var/lib/kubelet/pods/f4481828-f464-47c6-a803-0c1962101efa/volumes" Dec 04 15:23:37 crc kubenswrapper[4946]: I1204 15:23:37.660892 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 04 15:23:38 crc kubenswrapper[4946]: I1204 15:23:38.005109 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-4w6dl" event={"ID":"5c809ac2-b396-40ae-ac9c-0eb18befdf08","Type":"ContainerStarted","Data":"9b259f85f0124a2d4d84c3ed3d78d2b72042e0ec09467385688507572f9b0f22"} Dec 04 15:23:38 crc kubenswrapper[4946]: I1204 15:23:38.015698 4946 generic.go:334] "Generic (PLEG): container finished" podID="96a446b9-7b24-42fa-b6bb-99a22e323530" containerID="d0ad6217a4ac5111cc56d68909269f05cd5dcca0b234c56d2904891c04b09d39" exitCode=0 Dec 04 15:23:38 crc kubenswrapper[4946]: I1204 15:23:38.016014 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-c72b-account-create-update-tc59c" event={"ID":"96a446b9-7b24-42fa-b6bb-99a22e323530","Type":"ContainerDied","Data":"d0ad6217a4ac5111cc56d68909269f05cd5dcca0b234c56d2904891c04b09d39"} Dec 04 15:23:38 crc kubenswrapper[4946]: I1204 15:23:38.021136 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cloudkitty-623f-account-create-update-hlk8q" event={"ID":"99713bb8-2c08-402d-ba82-45e2e64ef670","Type":"ContainerStarted","Data":"1050643e71f1a3316be0926b85a23c686c56b61696ff4f0e51e7469646e8ee85"} Dec 04 15:23:38 crc kubenswrapper[4946]: I1204 15:23:38.021341 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-623f-account-create-update-hlk8q" event={"ID":"99713bb8-2c08-402d-ba82-45e2e64ef670","Type":"ContainerStarted","Data":"c3d5f533111f32dbd903f217d726e1282c691324ad0460365ef390dc216cf489"} Dec 04 15:23:38 crc kubenswrapper[4946]: I1204 15:23:38.033092 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-4w6dl" podStartSLOduration=11.033061102 podStartE2EDuration="11.033061102s" podCreationTimestamp="2025-12-04 15:23:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:23:38.023102779 +0000 UTC m=+1268.909146420" watchObservedRunningTime="2025-12-04 15:23:38.033061102 +0000 UTC m=+1268.919104743" Dec 04 15:23:38 crc kubenswrapper[4946]: I1204 15:23:38.044186 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"ce1f1c5e-70ed-463e-88d7-a0a960dd328d","Type":"ContainerStarted","Data":"79fd73b9702cc29f1d2316e27280216ff8fb6f74e0c52a5f2013694195e14d34"} Dec 04 15:23:38 crc kubenswrapper[4946]: I1204 15:23:38.065364 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qv6jf" event={"ID":"e935b12d-7145-47e5-b691-57bd2c9f1fac","Type":"ContainerStarted","Data":"9cb67e95c5fbc2a0dd7e351697d2b13f8759c6be87c1162677ac584a152961e1"} Dec 04 15:23:38 crc kubenswrapper[4946]: I1204 15:23:38.065433 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qv6jf" event={"ID":"e935b12d-7145-47e5-b691-57bd2c9f1fac","Type":"ContainerStarted","Data":"a4321bcd4d903e938376969fa0d95db280b94549a5195c477091f8b181bdfb13"} Dec 04 15:23:38 crc kubenswrapper[4946]: I1204 15:23:38.078684 4946 generic.go:334] "Generic (PLEG): container finished" podID="9dbc4429-4eb9-4a7d-bd88-062fab6e1237" containerID="3124cb27f05cf471281baf0b088ac5759fa62d47d5d5c6352f3b191f4ec491a4" exitCode=0 Dec 04 15:23:38 crc kubenswrapper[4946]: I1204 15:23:38.078854 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c8d8-account-create-update-kpzbx" event={"ID":"9dbc4429-4eb9-4a7d-bd88-062fab6e1237","Type":"ContainerDied","Data":"3124cb27f05cf471281baf0b088ac5759fa62d47d5d5c6352f3b191f4ec491a4"} Dec 04 15:23:38 crc kubenswrapper[4946]: I1204 15:23:38.085477 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-sf24r" event={"ID":"77f8c12b-a81e-4b7a-b153-17f4320daeb5","Type":"ContainerStarted","Data":"ffead12db69d190a6075fb361de3c807760336976d12001249eb4e47eeba1fe5"} Dec 04 15:23:38 crc kubenswrapper[4946]: I1204 15:23:38.092509 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-623f-account-create-update-hlk8q" podStartSLOduration=10.092478818 podStartE2EDuration="10.092478818s" podCreationTimestamp="2025-12-04 15:23:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:23:38.072405489 +0000 UTC m=+1268.958449130" watchObservedRunningTime="2025-12-04 15:23:38.092478818 +0000 UTC m=+1268.978522459" Dec 04 15:23:38 crc 
kubenswrapper[4946]: I1204 15:23:38.093659 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-2e71-account-create-update-9w6zw" event={"ID":"35e0e7f6-eb97-4c70-8970-c9686b1579b7","Type":"ContainerStarted","Data":"4021bee0f4d236cd7ba8f99489e15e19d390b37b0bd48940f3d6f57707ac0b57"} Dec 04 15:23:38 crc kubenswrapper[4946]: I1204 15:23:38.105699 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-qv6jf" podStartSLOduration=10.105670859 podStartE2EDuration="10.105670859s" podCreationTimestamp="2025-12-04 15:23:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:23:38.091978044 +0000 UTC m=+1268.978021695" watchObservedRunningTime="2025-12-04 15:23:38.105670859 +0000 UTC m=+1268.991714500" Dec 04 15:23:38 crc kubenswrapper[4946]: I1204 15:23:38.109637 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-wpggf" event={"ID":"98ae66d2-d939-4351-bf32-f649f37df068","Type":"ContainerStarted","Data":"681aaf0480a8cbf95e341377ca849c884d2988c38f2941f101dcb36df215c868"} Dec 04 15:23:38 crc kubenswrapper[4946]: I1204 15:23:38.148328 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-sf24r" podStartSLOduration=11.148301266 podStartE2EDuration="11.148301266s" podCreationTimestamp="2025-12-04 15:23:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:23:38.140382629 +0000 UTC m=+1269.026426280" watchObservedRunningTime="2025-12-04 15:23:38.148301266 +0000 UTC m=+1269.034344907" Dec 04 15:23:38 crc kubenswrapper[4946]: I1204 15:23:38.178528 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-2e71-account-create-update-9w6zw" podStartSLOduration=10.178494303 podStartE2EDuration="10.178494303s" podCreationTimestamp="2025-12-04 15:23:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:23:38.164635633 +0000 UTC m=+1269.050679274" watchObservedRunningTime="2025-12-04 15:23:38.178494303 +0000 UTC m=+1269.064537944" Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.137279 4946 generic.go:334] "Generic (PLEG): container finished" podID="77f8c12b-a81e-4b7a-b153-17f4320daeb5" containerID="ffead12db69d190a6075fb361de3c807760336976d12001249eb4e47eeba1fe5" exitCode=0 Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.137708 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-sf24r" event={"ID":"77f8c12b-a81e-4b7a-b153-17f4320daeb5","Type":"ContainerDied","Data":"ffead12db69d190a6075fb361de3c807760336976d12001249eb4e47eeba1fe5"} Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.142559 4946 generic.go:334] "Generic (PLEG): container finished" podID="99713bb8-2c08-402d-ba82-45e2e64ef670" containerID="1050643e71f1a3316be0926b85a23c686c56b61696ff4f0e51e7469646e8ee85" exitCode=0 Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.142624 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-623f-account-create-update-hlk8q" event={"ID":"99713bb8-2c08-402d-ba82-45e2e64ef670","Type":"ContainerDied","Data":"1050643e71f1a3316be0926b85a23c686c56b61696ff4f0e51e7469646e8ee85"} Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.145050 4946 
generic.go:334] "Generic (PLEG): container finished" podID="35e0e7f6-eb97-4c70-8970-c9686b1579b7" containerID="4021bee0f4d236cd7ba8f99489e15e19d390b37b0bd48940f3d6f57707ac0b57" exitCode=0 Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.145096 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-2e71-account-create-update-9w6zw" event={"ID":"35e0e7f6-eb97-4c70-8970-c9686b1579b7","Type":"ContainerDied","Data":"4021bee0f4d236cd7ba8f99489e15e19d390b37b0bd48940f3d6f57707ac0b57"} Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.147032 4946 generic.go:334] "Generic (PLEG): container finished" podID="e935b12d-7145-47e5-b691-57bd2c9f1fac" containerID="9cb67e95c5fbc2a0dd7e351697d2b13f8759c6be87c1162677ac584a152961e1" exitCode=0 Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.147077 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qv6jf" event={"ID":"e935b12d-7145-47e5-b691-57bd2c9f1fac","Type":"ContainerDied","Data":"9cb67e95c5fbc2a0dd7e351697d2b13f8759c6be87c1162677ac584a152961e1"} Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.151003 4946 generic.go:334] "Generic (PLEG): container finished" podID="5c809ac2-b396-40ae-ac9c-0eb18befdf08" containerID="9b259f85f0124a2d4d84c3ed3d78d2b72042e0ec09467385688507572f9b0f22" exitCode=0 Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.151108 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-4w6dl" event={"ID":"5c809ac2-b396-40ae-ac9c-0eb18befdf08","Type":"ContainerDied","Data":"9b259f85f0124a2d4d84c3ed3d78d2b72042e0ec09467385688507572f9b0f22"} Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.162466 4946 generic.go:334] "Generic (PLEG): container finished" podID="98ae66d2-d939-4351-bf32-f649f37df068" containerID="681aaf0480a8cbf95e341377ca849c884d2988c38f2941f101dcb36df215c868" exitCode=0 Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.162572 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-wpggf" event={"ID":"98ae66d2-d939-4351-bf32-f649f37df068","Type":"ContainerDied","Data":"681aaf0480a8cbf95e341377ca849c884d2988c38f2941f101dcb36df215c868"} Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.664941 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-wpggf" Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.779161 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98ae66d2-d939-4351-bf32-f649f37df068-operator-scripts\") pod \"98ae66d2-d939-4351-bf32-f649f37df068\" (UID: \"98ae66d2-d939-4351-bf32-f649f37df068\") " Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.779627 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wld97\" (UniqueName: \"kubernetes.io/projected/98ae66d2-d939-4351-bf32-f649f37df068-kube-api-access-wld97\") pod \"98ae66d2-d939-4351-bf32-f649f37df068\" (UID: \"98ae66d2-d939-4351-bf32-f649f37df068\") " Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.782404 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98ae66d2-d939-4351-bf32-f649f37df068-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "98ae66d2-d939-4351-bf32-f649f37df068" (UID: "98ae66d2-d939-4351-bf32-f649f37df068"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.860356 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98ae66d2-d939-4351-bf32-f649f37df068-kube-api-access-wld97" (OuterVolumeSpecName: "kube-api-access-wld97") pod "98ae66d2-d939-4351-bf32-f649f37df068" (UID: "98ae66d2-d939-4351-bf32-f649f37df068"). InnerVolumeSpecName "kube-api-access-wld97". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.882685 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98ae66d2-d939-4351-bf32-f649f37df068-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:39 crc kubenswrapper[4946]: I1204 15:23:39.882730 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wld97\" (UniqueName: \"kubernetes.io/projected/98ae66d2-d939-4351-bf32-f649f37df068-kube-api-access-wld97\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.095413 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-c8d8-account-create-update-kpzbx" Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.099543 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-c72b-account-create-update-tc59c" Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.189697 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hr7nq\" (UniqueName: \"kubernetes.io/projected/9dbc4429-4eb9-4a7d-bd88-062fab6e1237-kube-api-access-hr7nq\") pod \"9dbc4429-4eb9-4a7d-bd88-062fab6e1237\" (UID: \"9dbc4429-4eb9-4a7d-bd88-062fab6e1237\") " Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.189811 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96a446b9-7b24-42fa-b6bb-99a22e323530-operator-scripts\") pod \"96a446b9-7b24-42fa-b6bb-99a22e323530\" (UID: \"96a446b9-7b24-42fa-b6bb-99a22e323530\") " Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.189881 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dbc4429-4eb9-4a7d-bd88-062fab6e1237-operator-scripts\") pod \"9dbc4429-4eb9-4a7d-bd88-062fab6e1237\" (UID: \"9dbc4429-4eb9-4a7d-bd88-062fab6e1237\") " Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.189997 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cd72c\" (UniqueName: \"kubernetes.io/projected/96a446b9-7b24-42fa-b6bb-99a22e323530-kube-api-access-cd72c\") pod \"96a446b9-7b24-42fa-b6bb-99a22e323530\" (UID: \"96a446b9-7b24-42fa-b6bb-99a22e323530\") " Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.190789 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96a446b9-7b24-42fa-b6bb-99a22e323530-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "96a446b9-7b24-42fa-b6bb-99a22e323530" (UID: "96a446b9-7b24-42fa-b6bb-99a22e323530"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.191603 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9dbc4429-4eb9-4a7d-bd88-062fab6e1237-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9dbc4429-4eb9-4a7d-bd88-062fab6e1237" (UID: "9dbc4429-4eb9-4a7d-bd88-062fab6e1237"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.200919 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dbc4429-4eb9-4a7d-bd88-062fab6e1237-kube-api-access-hr7nq" (OuterVolumeSpecName: "kube-api-access-hr7nq") pod "9dbc4429-4eb9-4a7d-bd88-062fab6e1237" (UID: "9dbc4429-4eb9-4a7d-bd88-062fab6e1237"). InnerVolumeSpecName "kube-api-access-hr7nq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.205735 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-c72b-account-create-update-tc59c" event={"ID":"96a446b9-7b24-42fa-b6bb-99a22e323530","Type":"ContainerDied","Data":"7df25fab8d3469769e8147b8c66d1589972e4f5e158b593f146bb7ea0dbee146"} Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.205800 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7df25fab8d3469769e8147b8c66d1589972e4f5e158b593f146bb7ea0dbee146" Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.205903 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-c72b-account-create-update-tc59c" Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.213943 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96a446b9-7b24-42fa-b6bb-99a22e323530-kube-api-access-cd72c" (OuterVolumeSpecName: "kube-api-access-cd72c") pod "96a446b9-7b24-42fa-b6bb-99a22e323530" (UID: "96a446b9-7b24-42fa-b6bb-99a22e323530"). InnerVolumeSpecName "kube-api-access-cd72c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.230562 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-wpggf" event={"ID":"98ae66d2-d939-4351-bf32-f649f37df068","Type":"ContainerDied","Data":"0e153514140ed408e4f19e2fa767e6c842a6b1f37f2d5bbd3a939339e48d5640"} Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.230594 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-wpggf" Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.230621 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e153514140ed408e4f19e2fa767e6c842a6b1f37f2d5bbd3a939339e48d5640" Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.257526 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c8d8-account-create-update-kpzbx" event={"ID":"9dbc4429-4eb9-4a7d-bd88-062fab6e1237","Type":"ContainerDied","Data":"7a96e5ab905c8b129f9d755b35a84a1d4eb032a2c8c33985c0770c46b8143b31"} Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.257587 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a96e5ab905c8b129f9d755b35a84a1d4eb032a2c8c33985c0770c46b8143b31" Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.257690 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-c8d8-account-create-update-kpzbx" Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.273601 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7676ab4-212c-4e17-a84a-0979a65936d1","Type":"ContainerStarted","Data":"ce74b607895c0f31404fd56fe339515789acce60ae1d3807d028a079d0351a6a"} Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.304211 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hr7nq\" (UniqueName: \"kubernetes.io/projected/9dbc4429-4eb9-4a7d-bd88-062fab6e1237-kube-api-access-hr7nq\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.304242 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96a446b9-7b24-42fa-b6bb-99a22e323530-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.304252 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dbc4429-4eb9-4a7d-bd88-062fab6e1237-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:40 crc kubenswrapper[4946]: I1204 15:23:40.304261 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cd72c\" (UniqueName: \"kubernetes.io/projected/96a446b9-7b24-42fa-b6bb-99a22e323530-kube-api-access-cd72c\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:41 crc kubenswrapper[4946]: I1204 15:23:41.286230 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7676ab4-212c-4e17-a84a-0979a65936d1","Type":"ContainerStarted","Data":"5f872d05cb79680e8a9eaa681faeed072c6aa9dc7b5534b41bb1fc2281037973"} Dec 04 15:23:41 crc kubenswrapper[4946]: I1204 15:23:41.289256 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"ce1f1c5e-70ed-463e-88d7-a0a960dd328d","Type":"ContainerStarted","Data":"f97430f74e6c88321ceb3bf64310f3833ebbad118eccce8975622d5fdd42331f"} Dec 04 15:23:43 crc kubenswrapper[4946]: I1204 15:23:43.745996 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qv6jf" Dec 04 15:23:43 crc kubenswrapper[4946]: I1204 15:23:43.795951 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crnvm\" (UniqueName: \"kubernetes.io/projected/e935b12d-7145-47e5-b691-57bd2c9f1fac-kube-api-access-crnvm\") pod \"e935b12d-7145-47e5-b691-57bd2c9f1fac\" (UID: \"e935b12d-7145-47e5-b691-57bd2c9f1fac\") " Dec 04 15:23:43 crc kubenswrapper[4946]: I1204 15:23:43.796017 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e935b12d-7145-47e5-b691-57bd2c9f1fac-operator-scripts\") pod \"e935b12d-7145-47e5-b691-57bd2c9f1fac\" (UID: \"e935b12d-7145-47e5-b691-57bd2c9f1fac\") " Dec 04 15:23:43 crc kubenswrapper[4946]: I1204 15:23:43.797866 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e935b12d-7145-47e5-b691-57bd2c9f1fac-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e935b12d-7145-47e5-b691-57bd2c9f1fac" (UID: "e935b12d-7145-47e5-b691-57bd2c9f1fac"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:43 crc kubenswrapper[4946]: I1204 15:23:43.820201 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-623f-account-create-update-hlk8q" Dec 04 15:23:43 crc kubenswrapper[4946]: I1204 15:23:43.823740 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e935b12d-7145-47e5-b691-57bd2c9f1fac-kube-api-access-crnvm" (OuterVolumeSpecName: "kube-api-access-crnvm") pod "e935b12d-7145-47e5-b691-57bd2c9f1fac" (UID: "e935b12d-7145-47e5-b691-57bd2c9f1fac"). InnerVolumeSpecName "kube-api-access-crnvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:43 crc kubenswrapper[4946]: I1204 15:23:43.890285 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-sf24r" Dec 04 15:23:43 crc kubenswrapper[4946]: I1204 15:23:43.897265 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-2e71-account-create-update-9w6zw" Dec 04 15:23:43 crc kubenswrapper[4946]: I1204 15:23:43.897596 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99713bb8-2c08-402d-ba82-45e2e64ef670-operator-scripts\") pod \"99713bb8-2c08-402d-ba82-45e2e64ef670\" (UID: \"99713bb8-2c08-402d-ba82-45e2e64ef670\") " Dec 04 15:23:43 crc kubenswrapper[4946]: I1204 15:23:43.897835 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqr9r\" (UniqueName: \"kubernetes.io/projected/99713bb8-2c08-402d-ba82-45e2e64ef670-kube-api-access-xqr9r\") pod \"99713bb8-2c08-402d-ba82-45e2e64ef670\" (UID: \"99713bb8-2c08-402d-ba82-45e2e64ef670\") " Dec 04 15:23:43 crc kubenswrapper[4946]: I1204 15:23:43.898357 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crnvm\" (UniqueName: \"kubernetes.io/projected/e935b12d-7145-47e5-b691-57bd2c9f1fac-kube-api-access-crnvm\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:43 crc kubenswrapper[4946]: I1204 15:23:43.898379 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e935b12d-7145-47e5-b691-57bd2c9f1fac-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:43 crc kubenswrapper[4946]: I1204 15:23:43.898821 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99713bb8-2c08-402d-ba82-45e2e64ef670-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "99713bb8-2c08-402d-ba82-45e2e64ef670" (UID: "99713bb8-2c08-402d-ba82-45e2e64ef670"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:43 crc kubenswrapper[4946]: I1204 15:23:43.906275 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99713bb8-2c08-402d-ba82-45e2e64ef670-kube-api-access-xqr9r" (OuterVolumeSpecName: "kube-api-access-xqr9r") pod "99713bb8-2c08-402d-ba82-45e2e64ef670" (UID: "99713bb8-2c08-402d-ba82-45e2e64ef670"). InnerVolumeSpecName "kube-api-access-xqr9r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:43 crc kubenswrapper[4946]: I1204 15:23:43.910012 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-4w6dl" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:43.999918 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jl6dc\" (UniqueName: \"kubernetes.io/projected/35e0e7f6-eb97-4c70-8970-c9686b1579b7-kube-api-access-jl6dc\") pod \"35e0e7f6-eb97-4c70-8970-c9686b1579b7\" (UID: \"35e0e7f6-eb97-4c70-8970-c9686b1579b7\") " Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.000001 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35e0e7f6-eb97-4c70-8970-c9686b1579b7-operator-scripts\") pod \"35e0e7f6-eb97-4c70-8970-c9686b1579b7\" (UID: \"35e0e7f6-eb97-4c70-8970-c9686b1579b7\") " Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.000108 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6mb4\" (UniqueName: \"kubernetes.io/projected/5c809ac2-b396-40ae-ac9c-0eb18befdf08-kube-api-access-l6mb4\") pod \"5c809ac2-b396-40ae-ac9c-0eb18befdf08\" (UID: \"5c809ac2-b396-40ae-ac9c-0eb18befdf08\") " Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.000152 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77f8c12b-a81e-4b7a-b153-17f4320daeb5-operator-scripts\") pod \"77f8c12b-a81e-4b7a-b153-17f4320daeb5\" (UID: \"77f8c12b-a81e-4b7a-b153-17f4320daeb5\") " Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.000205 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q69sn\" (UniqueName: \"kubernetes.io/projected/77f8c12b-a81e-4b7a-b153-17f4320daeb5-kube-api-access-q69sn\") pod \"77f8c12b-a81e-4b7a-b153-17f4320daeb5\" (UID: \"77f8c12b-a81e-4b7a-b153-17f4320daeb5\") " Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.000244 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c809ac2-b396-40ae-ac9c-0eb18befdf08-operator-scripts\") pod \"5c809ac2-b396-40ae-ac9c-0eb18befdf08\" (UID: \"5c809ac2-b396-40ae-ac9c-0eb18befdf08\") " Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.000654 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqr9r\" (UniqueName: \"kubernetes.io/projected/99713bb8-2c08-402d-ba82-45e2e64ef670-kube-api-access-xqr9r\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.000676 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99713bb8-2c08-402d-ba82-45e2e64ef670-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.001287 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c809ac2-b396-40ae-ac9c-0eb18befdf08-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5c809ac2-b396-40ae-ac9c-0eb18befdf08" (UID: "5c809ac2-b396-40ae-ac9c-0eb18befdf08"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.001513 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35e0e7f6-eb97-4c70-8970-c9686b1579b7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "35e0e7f6-eb97-4c70-8970-c9686b1579b7" (UID: "35e0e7f6-eb97-4c70-8970-c9686b1579b7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.001805 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77f8c12b-a81e-4b7a-b153-17f4320daeb5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "77f8c12b-a81e-4b7a-b153-17f4320daeb5" (UID: "77f8c12b-a81e-4b7a-b153-17f4320daeb5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.005795 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35e0e7f6-eb97-4c70-8970-c9686b1579b7-kube-api-access-jl6dc" (OuterVolumeSpecName: "kube-api-access-jl6dc") pod "35e0e7f6-eb97-4c70-8970-c9686b1579b7" (UID: "35e0e7f6-eb97-4c70-8970-c9686b1579b7"). InnerVolumeSpecName "kube-api-access-jl6dc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.005869 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c809ac2-b396-40ae-ac9c-0eb18befdf08-kube-api-access-l6mb4" (OuterVolumeSpecName: "kube-api-access-l6mb4") pod "5c809ac2-b396-40ae-ac9c-0eb18befdf08" (UID: "5c809ac2-b396-40ae-ac9c-0eb18befdf08"). InnerVolumeSpecName "kube-api-access-l6mb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.006436 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77f8c12b-a81e-4b7a-b153-17f4320daeb5-kube-api-access-q69sn" (OuterVolumeSpecName: "kube-api-access-q69sn") pod "77f8c12b-a81e-4b7a-b153-17f4320daeb5" (UID: "77f8c12b-a81e-4b7a-b153-17f4320daeb5"). InnerVolumeSpecName "kube-api-access-q69sn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.103667 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jl6dc\" (UniqueName: \"kubernetes.io/projected/35e0e7f6-eb97-4c70-8970-c9686b1579b7-kube-api-access-jl6dc\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.104185 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35e0e7f6-eb97-4c70-8970-c9686b1579b7-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.104202 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6mb4\" (UniqueName: \"kubernetes.io/projected/5c809ac2-b396-40ae-ac9c-0eb18befdf08-kube-api-access-l6mb4\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.104215 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77f8c12b-a81e-4b7a-b153-17f4320daeb5-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.104226 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q69sn\" (UniqueName: \"kubernetes.io/projected/77f8c12b-a81e-4b7a-b153-17f4320daeb5-kube-api-access-q69sn\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.104237 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c809ac2-b396-40ae-ac9c-0eb18befdf08-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.328091 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-4w6dl" event={"ID":"5c809ac2-b396-40ae-ac9c-0eb18befdf08","Type":"ContainerDied","Data":"4142b8868ab9d1cf9a5b24399b93c9ddfef46d5d3467a92ed8869280d23a551b"} Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.328706 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4142b8868ab9d1cf9a5b24399b93c9ddfef46d5d3467a92ed8869280d23a551b" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.328156 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-4w6dl" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.338331 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gzkww" event={"ID":"4f646177-69a6-42cf-9d49-8be8541c58c1","Type":"ContainerStarted","Data":"4274ff774468bb4c61f9e42263c8acee46be3eb6297d25aab29a2fd62cacd8b9"} Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.343435 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-sf24r" event={"ID":"77f8c12b-a81e-4b7a-b153-17f4320daeb5","Type":"ContainerDied","Data":"d2ce79523d116f60c78b56972832c1098ca0258587a4390d9b2fdfd5a4e8bef7"} Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.343496 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d2ce79523d116f60c78b56972832c1098ca0258587a4390d9b2fdfd5a4e8bef7" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.343453 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-sf24r" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.346784 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-623f-account-create-update-hlk8q" event={"ID":"99713bb8-2c08-402d-ba82-45e2e64ef670","Type":"ContainerDied","Data":"c3d5f533111f32dbd903f217d726e1282c691324ad0460365ef390dc216cf489"} Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.346850 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3d5f533111f32dbd903f217d726e1282c691324ad0460365ef390dc216cf489" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.346941 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-623f-account-create-update-hlk8q" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.368130 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7676ab4-212c-4e17-a84a-0979a65936d1","Type":"ContainerStarted","Data":"7de85a778b39bc200acd9d0d3451d75e20af836e10d4f4e6f1647a818c45f5cc"} Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.368198 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7676ab4-212c-4e17-a84a-0979a65936d1","Type":"ContainerStarted","Data":"7eaca8c909cbf9447b52c63a4e6325f75a95fba54e1767814e0a9729a4d9fe06"} Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.368741 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-gzkww" podStartSLOduration=10.459413867 podStartE2EDuration="17.368704031s" podCreationTimestamp="2025-12-04 15:23:27 +0000 UTC" firstStartedPulling="2025-12-04 15:23:36.601314813 +0000 UTC m=+1267.487358444" lastFinishedPulling="2025-12-04 15:23:43.510604967 +0000 UTC m=+1274.396648608" observedRunningTime="2025-12-04 15:23:44.3606424 +0000 UTC m=+1275.246686051" watchObservedRunningTime="2025-12-04 15:23:44.368704031 +0000 UTC m=+1275.254747672" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.371078 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-2e71-account-create-update-9w6zw" event={"ID":"35e0e7f6-eb97-4c70-8970-c9686b1579b7","Type":"ContainerDied","Data":"2e91a795738cb8befcb3ef333c122f630819394ff5afdd7e462c319a804de662"} Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.371143 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e91a795738cb8befcb3ef333c122f630819394ff5afdd7e462c319a804de662" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.371229 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-2e71-account-create-update-9w6zw" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.379832 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qv6jf" event={"ID":"e935b12d-7145-47e5-b691-57bd2c9f1fac","Type":"ContainerDied","Data":"a4321bcd4d903e938376969fa0d95db280b94549a5195c477091f8b181bdfb13"} Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.379900 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4321bcd4d903e938376969fa0d95db280b94549a5195c477091f8b181bdfb13" Dec 04 15:23:44 crc kubenswrapper[4946]: I1204 15:23:44.380288 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-qv6jf" Dec 04 15:23:47 crc kubenswrapper[4946]: I1204 15:23:47.417851 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7676ab4-212c-4e17-a84a-0979a65936d1","Type":"ContainerStarted","Data":"0e4ddd8f0eea069cc2f966e43230a6a73ff34e13f623ecffb301a5f5f91b5bf1"} Dec 04 15:23:48 crc kubenswrapper[4946]: I1204 15:23:48.434507 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7676ab4-212c-4e17-a84a-0979a65936d1","Type":"ContainerStarted","Data":"f541ee146ac22338b4b472c82d958f6600dabae72e429fd9c81e5188776607bb"} Dec 04 15:23:48 crc kubenswrapper[4946]: I1204 15:23:48.434556 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7676ab4-212c-4e17-a84a-0979a65936d1","Type":"ContainerStarted","Data":"fb233431b919d69950391624a7c36d4c27f2768022dc674faa8c4d2935c0fc2b"} Dec 04 15:23:49 crc kubenswrapper[4946]: E1204 15:23:49.307495 4946 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce1f1c5e_70ed_463e_88d7_a0a960dd328d.slice/crio-f97430f74e6c88321ceb3bf64310f3833ebbad118eccce8975622d5fdd42331f.scope\": RecentStats: unable to find data in memory cache]" Dec 04 15:23:49 crc kubenswrapper[4946]: I1204 15:23:49.451057 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7676ab4-212c-4e17-a84a-0979a65936d1","Type":"ContainerStarted","Data":"c974fd7ea732972eeeb76f7edfaa8af4dadaa54f8a52b9245b5484f231ee8533"} Dec 04 15:23:49 crc kubenswrapper[4946]: I1204 15:23:49.453548 4946 generic.go:334] "Generic (PLEG): container finished" podID="ce1f1c5e-70ed-463e-88d7-a0a960dd328d" containerID="f97430f74e6c88321ceb3bf64310f3833ebbad118eccce8975622d5fdd42331f" exitCode=0 Dec 04 15:23:49 crc kubenswrapper[4946]: I1204 15:23:49.465614 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"ce1f1c5e-70ed-463e-88d7-a0a960dd328d","Type":"ContainerDied","Data":"f97430f74e6c88321ceb3bf64310f3833ebbad118eccce8975622d5fdd42331f"} Dec 04 15:23:50 crc kubenswrapper[4946]: I1204 15:23:50.470916 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"ce1f1c5e-70ed-463e-88d7-a0a960dd328d","Type":"ContainerStarted","Data":"96cfcfa9f35959d66ed655c60a8ad0391ab5be2336d15b2b9b55c9f5d67c976f"} Dec 04 15:23:51 crc kubenswrapper[4946]: I1204 15:23:51.489797 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7676ab4-212c-4e17-a84a-0979a65936d1","Type":"ContainerStarted","Data":"e4fc2c2e1548eddbb731c70e85d6b4b6b7db1b0e9b3d9136cc36d5014fa91c36"} Dec 04 15:23:51 crc kubenswrapper[4946]: I1204 15:23:51.490327 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7676ab4-212c-4e17-a84a-0979a65936d1","Type":"ContainerStarted","Data":"16097d836f3b99af63a51974310f66dd1497ae726a6b560cc9e948457c5af6ad"} Dec 04 15:23:51 crc kubenswrapper[4946]: I1204 15:23:51.492529 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-tb5mp" event={"ID":"2a4e74d1-f18d-4356-be6d-10171056d511","Type":"ContainerStarted","Data":"cef4183916354fd644469fc88df61c2c5b305125508468352a41d6b663967ffd"} Dec 04 15:23:51 crc kubenswrapper[4946]: I1204 15:23:51.502426 4946 
generic.go:334] "Generic (PLEG): container finished" podID="4f646177-69a6-42cf-9d49-8be8541c58c1" containerID="4274ff774468bb4c61f9e42263c8acee46be3eb6297d25aab29a2fd62cacd8b9" exitCode=0 Dec 04 15:23:51 crc kubenswrapper[4946]: I1204 15:23:51.502481 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gzkww" event={"ID":"4f646177-69a6-42cf-9d49-8be8541c58c1","Type":"ContainerDied","Data":"4274ff774468bb4c61f9e42263c8acee46be3eb6297d25aab29a2fd62cacd8b9"} Dec 04 15:23:51 crc kubenswrapper[4946]: I1204 15:23:51.564860 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-tb5mp" podStartSLOduration=3.857389756 podStartE2EDuration="36.564820817s" podCreationTimestamp="2025-12-04 15:23:15 +0000 UTC" firstStartedPulling="2025-12-04 15:23:17.185379939 +0000 UTC m=+1248.071423580" lastFinishedPulling="2025-12-04 15:23:49.892811 +0000 UTC m=+1280.778854641" observedRunningTime="2025-12-04 15:23:51.535154655 +0000 UTC m=+1282.421198316" watchObservedRunningTime="2025-12-04 15:23:51.564820817 +0000 UTC m=+1282.450864458" Dec 04 15:23:52 crc kubenswrapper[4946]: I1204 15:23:52.533162 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7676ab4-212c-4e17-a84a-0979a65936d1","Type":"ContainerStarted","Data":"b9a0900a7bc2ac14912f5da689c3525c5a6844ff6849cc9d93670c62db7da398"} Dec 04 15:23:52 crc kubenswrapper[4946]: I1204 15:23:52.533587 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7676ab4-212c-4e17-a84a-0979a65936d1","Type":"ContainerStarted","Data":"3c7cb58b5f148c52dc0abcafbfe2bafee2d0a42b50388faf29aed05357bc663d"} Dec 04 15:23:52 crc kubenswrapper[4946]: I1204 15:23:52.533598 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7676ab4-212c-4e17-a84a-0979a65936d1","Type":"ContainerStarted","Data":"74def2022460c10bca1558ae52f53f86573ede9dae89e2fe648ff55607d78cd3"} Dec 04 15:23:52 crc kubenswrapper[4946]: I1204 15:23:52.986846 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-gzkww" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.033068 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f646177-69a6-42cf-9d49-8be8541c58c1-config-data\") pod \"4f646177-69a6-42cf-9d49-8be8541c58c1\" (UID: \"4f646177-69a6-42cf-9d49-8be8541c58c1\") " Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.033233 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndb75\" (UniqueName: \"kubernetes.io/projected/4f646177-69a6-42cf-9d49-8be8541c58c1-kube-api-access-ndb75\") pod \"4f646177-69a6-42cf-9d49-8be8541c58c1\" (UID: \"4f646177-69a6-42cf-9d49-8be8541c58c1\") " Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.033318 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f646177-69a6-42cf-9d49-8be8541c58c1-combined-ca-bundle\") pod \"4f646177-69a6-42cf-9d49-8be8541c58c1\" (UID: \"4f646177-69a6-42cf-9d49-8be8541c58c1\") " Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.053445 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f646177-69a6-42cf-9d49-8be8541c58c1-kube-api-access-ndb75" (OuterVolumeSpecName: "kube-api-access-ndb75") pod "4f646177-69a6-42cf-9d49-8be8541c58c1" (UID: "4f646177-69a6-42cf-9d49-8be8541c58c1"). InnerVolumeSpecName "kube-api-access-ndb75". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.075254 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f646177-69a6-42cf-9d49-8be8541c58c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f646177-69a6-42cf-9d49-8be8541c58c1" (UID: "4f646177-69a6-42cf-9d49-8be8541c58c1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.109842 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f646177-69a6-42cf-9d49-8be8541c58c1-config-data" (OuterVolumeSpecName: "config-data") pod "4f646177-69a6-42cf-9d49-8be8541c58c1" (UID: "4f646177-69a6-42cf-9d49-8be8541c58c1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.136519 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f646177-69a6-42cf-9d49-8be8541c58c1-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.136574 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndb75\" (UniqueName: \"kubernetes.io/projected/4f646177-69a6-42cf-9d49-8be8541c58c1-kube-api-access-ndb75\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.136592 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f646177-69a6-42cf-9d49-8be8541c58c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.549652 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gzkww" event={"ID":"4f646177-69a6-42cf-9d49-8be8541c58c1","Type":"ContainerDied","Data":"dc0b3ef82e9a6012bea4a8efdfb0a8566645239aad26c175a083ff268334d3ed"} Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.550090 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc0b3ef82e9a6012bea4a8efdfb0a8566645239aad26c175a083ff268334d3ed" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.550094 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-gzkww" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.558948 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7676ab4-212c-4e17-a84a-0979a65936d1","Type":"ContainerStarted","Data":"f40ba6a98739007848e14d95a83fb664530ea78c8a463ff629485cb5a9df6907"} Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.559356 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b7676ab4-212c-4e17-a84a-0979a65936d1","Type":"ContainerStarted","Data":"6291a884ca230a2fc1a9557815c86d2cb85b9762cc9488e113a04742752047ce"} Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.564904 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"ce1f1c5e-70ed-463e-88d7-a0a960dd328d","Type":"ContainerStarted","Data":"5349a4de9d8b557bd3671b1ae92de10804fb21bb1d3d6031ae4964d4bea4769c"} Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.564961 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"ce1f1c5e-70ed-463e-88d7-a0a960dd328d","Type":"ContainerStarted","Data":"89efc3099166c01fb180df4b4d0b8d48ab6bdc258f058fc07ec808b0ccb41f3a"} Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.630779 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=47.588157527 podStartE2EDuration="1m1.630752838s" podCreationTimestamp="2025-12-04 15:22:52 +0000 UTC" firstStartedPulling="2025-12-04 15:23:36.885873234 +0000 UTC m=+1267.771916875" lastFinishedPulling="2025-12-04 15:23:50.928468545 +0000 UTC m=+1281.814512186" observedRunningTime="2025-12-04 15:23:53.605389764 +0000 UTC m=+1284.491433405" watchObservedRunningTime="2025-12-04 15:23:53.630752838 +0000 UTC m=+1284.516796479" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.656269 4946 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=17.656241896 podStartE2EDuration="17.656241896s" podCreationTimestamp="2025-12-04 15:23:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:23:53.639893179 +0000 UTC m=+1284.525936820" watchObservedRunningTime="2025-12-04 15:23:53.656241896 +0000 UTC m=+1284.542285557" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.830389 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-fnf9d"] Dec 04 15:23:53 crc kubenswrapper[4946]: E1204 15:23:53.830872 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e935b12d-7145-47e5-b691-57bd2c9f1fac" containerName="mariadb-database-create" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.830885 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e935b12d-7145-47e5-b691-57bd2c9f1fac" containerName="mariadb-database-create" Dec 04 15:23:53 crc kubenswrapper[4946]: E1204 15:23:53.830897 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96a446b9-7b24-42fa-b6bb-99a22e323530" containerName="mariadb-account-create-update" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.830903 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="96a446b9-7b24-42fa-b6bb-99a22e323530" containerName="mariadb-account-create-update" Dec 04 15:23:53 crc kubenswrapper[4946]: E1204 15:23:53.830909 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35e0e7f6-eb97-4c70-8970-c9686b1579b7" containerName="mariadb-account-create-update" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.830915 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="35e0e7f6-eb97-4c70-8970-c9686b1579b7" containerName="mariadb-account-create-update" Dec 04 15:23:53 crc kubenswrapper[4946]: E1204 15:23:53.830925 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99713bb8-2c08-402d-ba82-45e2e64ef670" containerName="mariadb-account-create-update" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.830934 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="99713bb8-2c08-402d-ba82-45e2e64ef670" containerName="mariadb-account-create-update" Dec 04 15:23:53 crc kubenswrapper[4946]: E1204 15:23:53.830963 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77f8c12b-a81e-4b7a-b153-17f4320daeb5" containerName="mariadb-database-create" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.830969 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="77f8c12b-a81e-4b7a-b153-17f4320daeb5" containerName="mariadb-database-create" Dec 04 15:23:53 crc kubenswrapper[4946]: E1204 15:23:53.830978 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98ae66d2-d939-4351-bf32-f649f37df068" containerName="mariadb-database-create" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.830984 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="98ae66d2-d939-4351-bf32-f649f37df068" containerName="mariadb-database-create" Dec 04 15:23:53 crc kubenswrapper[4946]: E1204 15:23:53.830992 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c809ac2-b396-40ae-ac9c-0eb18befdf08" containerName="mariadb-database-create" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.830997 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c809ac2-b396-40ae-ac9c-0eb18befdf08" containerName="mariadb-database-create" Dec 04 15:23:53 crc 
kubenswrapper[4946]: E1204 15:23:53.831011 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f646177-69a6-42cf-9d49-8be8541c58c1" containerName="keystone-db-sync" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.831017 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f646177-69a6-42cf-9d49-8be8541c58c1" containerName="keystone-db-sync" Dec 04 15:23:53 crc kubenswrapper[4946]: E1204 15:23:53.831035 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dbc4429-4eb9-4a7d-bd88-062fab6e1237" containerName="mariadb-account-create-update" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.831041 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dbc4429-4eb9-4a7d-bd88-062fab6e1237" containerName="mariadb-account-create-update" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.837087 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f646177-69a6-42cf-9d49-8be8541c58c1" containerName="keystone-db-sync" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.837148 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="35e0e7f6-eb97-4c70-8970-c9686b1579b7" containerName="mariadb-account-create-update" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.837169 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="98ae66d2-d939-4351-bf32-f649f37df068" containerName="mariadb-database-create" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.837183 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c809ac2-b396-40ae-ac9c-0eb18befdf08" containerName="mariadb-database-create" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.837206 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="96a446b9-7b24-42fa-b6bb-99a22e323530" containerName="mariadb-account-create-update" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.837224 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dbc4429-4eb9-4a7d-bd88-062fab6e1237" containerName="mariadb-account-create-update" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.837233 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="99713bb8-2c08-402d-ba82-45e2e64ef670" containerName="mariadb-account-create-update" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.837257 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="77f8c12b-a81e-4b7a-b153-17f4320daeb5" containerName="mariadb-database-create" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.837280 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="e935b12d-7145-47e5-b691-57bd2c9f1fac" containerName="mariadb-database-create" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.838314 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.841801 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.846598 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.848384 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-d8v6l" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.848578 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.848724 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.854651 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-mvfks"] Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.856970 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.877202 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-fnf9d"] Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.907612 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-mvfks"] Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.953308 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-config-data\") pod \"keystone-bootstrap-fnf9d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.953382 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-ovsdbserver-sb\") pod \"dnsmasq-dns-f877ddd87-mvfks\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.953449 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-scripts\") pod \"keystone-bootstrap-fnf9d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.953482 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-combined-ca-bundle\") pod \"keystone-bootstrap-fnf9d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.953511 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfgb2\" (UniqueName: \"kubernetes.io/projected/56e85ac6-319e-4d28-bef2-10772f98318f-kube-api-access-qfgb2\") pod \"dnsmasq-dns-f877ddd87-mvfks\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 
04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.953555 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-fernet-keys\") pod \"keystone-bootstrap-fnf9d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.953589 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-dns-svc\") pod \"dnsmasq-dns-f877ddd87-mvfks\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.953610 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zsjg\" (UniqueName: \"kubernetes.io/projected/4c3653c9-449b-469f-8606-24c51c1cce6d-kube-api-access-4zsjg\") pod \"keystone-bootstrap-fnf9d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.953631 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-credential-keys\") pod \"keystone-bootstrap-fnf9d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.953649 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-ovsdbserver-nb\") pod \"dnsmasq-dns-f877ddd87-mvfks\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 04 15:23:53 crc kubenswrapper[4946]: I1204 15:23:53.953676 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-config\") pod \"dnsmasq-dns-f877ddd87-mvfks\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.056002 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-combined-ca-bundle\") pod \"keystone-bootstrap-fnf9d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.056529 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfgb2\" (UniqueName: \"kubernetes.io/projected/56e85ac6-319e-4d28-bef2-10772f98318f-kube-api-access-qfgb2\") pod \"dnsmasq-dns-f877ddd87-mvfks\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.056720 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-fernet-keys\") pod \"keystone-bootstrap-fnf9d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " pod="openstack/keystone-bootstrap-fnf9d" Dec 04 
15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.056898 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-dns-svc\") pod \"dnsmasq-dns-f877ddd87-mvfks\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.057020 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zsjg\" (UniqueName: \"kubernetes.io/projected/4c3653c9-449b-469f-8606-24c51c1cce6d-kube-api-access-4zsjg\") pod \"keystone-bootstrap-fnf9d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.057141 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-credential-keys\") pod \"keystone-bootstrap-fnf9d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.057362 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-ovsdbserver-nb\") pod \"dnsmasq-dns-f877ddd87-mvfks\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.057496 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-config\") pod \"dnsmasq-dns-f877ddd87-mvfks\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.057649 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-config-data\") pod \"keystone-bootstrap-fnf9d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.057798 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-ovsdbserver-sb\") pod \"dnsmasq-dns-f877ddd87-mvfks\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.058009 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-scripts\") pod \"keystone-bootstrap-fnf9d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.060068 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-dns-svc\") pod \"dnsmasq-dns-f877ddd87-mvfks\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.060702 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-ovsdbserver-nb\") pod \"dnsmasq-dns-f877ddd87-mvfks\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.061227 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-ovsdbserver-sb\") pod \"dnsmasq-dns-f877ddd87-mvfks\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.061712 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-config\") pod \"dnsmasq-dns-f877ddd87-mvfks\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.075516 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-fernet-keys\") pod \"keystone-bootstrap-fnf9d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.077587 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-credential-keys\") pod \"keystone-bootstrap-fnf9d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.078060 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-config-data\") pod \"keystone-bootstrap-fnf9d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.094896 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-combined-ca-bundle\") pod \"keystone-bootstrap-fnf9d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.095575 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-scripts\") pod \"keystone-bootstrap-fnf9d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.128902 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfgb2\" (UniqueName: \"kubernetes.io/projected/56e85ac6-319e-4d28-bef2-10772f98318f-kube-api-access-qfgb2\") pod \"dnsmasq-dns-f877ddd87-mvfks\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.137600 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zsjg\" (UniqueName: \"kubernetes.io/projected/4c3653c9-449b-469f-8606-24c51c1cce6d-kube-api-access-4zsjg\") pod \"keystone-bootstrap-fnf9d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " 
pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.163802 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-mvfks"] Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.164933 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.165704 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.250868 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-tvf2t"] Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.316747 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-tvf2t" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.328683 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.392495 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-b5qmt" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.409898 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-c2nbj"] Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.430271 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.435257 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.534838 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-cqwq4"] Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.541187 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.546314 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.546624 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.546828 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-wl6mt" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.569715 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-tvf2t"] Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.593675 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-c2nbj"] Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.602783 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc8f6\" (UniqueName: \"kubernetes.io/projected/e10e4d14-3642-427f-aaf4-e54ce4e747a0-kube-api-access-fc8f6\") pod \"dnsmasq-dns-5959f8865f-c2nbj\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.602866 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvw6r\" (UniqueName: \"kubernetes.io/projected/7a2d2577-fc5e-4375-8c8f-154aa218707f-kube-api-access-kvw6r\") pod \"barbican-db-sync-tvf2t\" (UID: \"7a2d2577-fc5e-4375-8c8f-154aa218707f\") " pod="openstack/barbican-db-sync-tvf2t" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.602928 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-config\") pod \"dnsmasq-dns-5959f8865f-c2nbj\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.602948 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-c2nbj\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.602974 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-c2nbj\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.603022 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a2d2577-fc5e-4375-8c8f-154aa218707f-db-sync-config-data\") pod \"barbican-db-sync-tvf2t\" (UID: \"7a2d2577-fc5e-4375-8c8f-154aa218707f\") " pod="openstack/barbican-db-sync-tvf2t" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.603058 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-c2nbj\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.603093 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-dns-svc\") pod \"dnsmasq-dns-5959f8865f-c2nbj\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.603211 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a2d2577-fc5e-4375-8c8f-154aa218707f-combined-ca-bundle\") pod \"barbican-db-sync-tvf2t\" (UID: \"7a2d2577-fc5e-4375-8c8f-154aa218707f\") " pod="openstack/barbican-db-sync-tvf2t" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.612922 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-cqwq4"] Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.621911 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-x2xgf"] Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.623393 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-x2xgf" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.634995 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.635676 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.636191 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-mjj4j" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.649099 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-x2xgf"] Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.678939 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-sync-w5njq"] Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.680834 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.684408 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.686187 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-z46wn" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.686311 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.688247 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.704769 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bjrd\" (UniqueName: \"kubernetes.io/projected/8fe7f895-e33e-4159-9dcd-689158d16f22-kube-api-access-5bjrd\") pod \"cinder-db-sync-cqwq4\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") " pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.704828 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-scripts\") pod \"cinder-db-sync-cqwq4\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") " pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.704872 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-config-data\") pod \"cinder-db-sync-cqwq4\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") " pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.704890 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8fe7f895-e33e-4159-9dcd-689158d16f22-etc-machine-id\") pod \"cinder-db-sync-cqwq4\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") " pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.704914 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc8f6\" (UniqueName: \"kubernetes.io/projected/e10e4d14-3642-427f-aaf4-e54ce4e747a0-kube-api-access-fc8f6\") pod \"dnsmasq-dns-5959f8865f-c2nbj\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.704930 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-combined-ca-bundle\") pod \"cinder-db-sync-cqwq4\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") " pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.704978 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvw6r\" (UniqueName: \"kubernetes.io/projected/7a2d2577-fc5e-4375-8c8f-154aa218707f-kube-api-access-kvw6r\") pod \"barbican-db-sync-tvf2t\" (UID: \"7a2d2577-fc5e-4375-8c8f-154aa218707f\") " pod="openstack/barbican-db-sync-tvf2t" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 
15:23:54.704995 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-db-sync-config-data\") pod \"cinder-db-sync-cqwq4\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") " pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.705052 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-config\") pod \"dnsmasq-dns-5959f8865f-c2nbj\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.705068 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-c2nbj\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.705164 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-c2nbj\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.705216 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a2d2577-fc5e-4375-8c8f-154aa218707f-db-sync-config-data\") pod \"barbican-db-sync-tvf2t\" (UID: \"7a2d2577-fc5e-4375-8c8f-154aa218707f\") " pod="openstack/barbican-db-sync-tvf2t" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.705246 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-c2nbj\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.705272 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-dns-svc\") pod \"dnsmasq-dns-5959f8865f-c2nbj\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.705344 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a2d2577-fc5e-4375-8c8f-154aa218707f-combined-ca-bundle\") pod \"barbican-db-sync-tvf2t\" (UID: \"7a2d2577-fc5e-4375-8c8f-154aa218707f\") " pod="openstack/barbican-db-sync-tvf2t" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.708959 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-config\") pod \"dnsmasq-dns-5959f8865f-c2nbj\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.709583 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-c2nbj\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.710096 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-c2nbj\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.710266 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-dns-svc\") pod \"dnsmasq-dns-5959f8865f-c2nbj\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.716248 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a2d2577-fc5e-4375-8c8f-154aa218707f-db-sync-config-data\") pod \"barbican-db-sync-tvf2t\" (UID: \"7a2d2577-fc5e-4375-8c8f-154aa218707f\") " pod="openstack/barbican-db-sync-tvf2t" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.721825 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a2d2577-fc5e-4375-8c8f-154aa218707f-combined-ca-bundle\") pod \"barbican-db-sync-tvf2t\" (UID: \"7a2d2577-fc5e-4375-8c8f-154aa218707f\") " pod="openstack/barbican-db-sync-tvf2t" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.722039 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-c2nbj\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.737149 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-w5njq"] Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.752693 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc8f6\" (UniqueName: \"kubernetes.io/projected/e10e4d14-3642-427f-aaf4-e54ce4e747a0-kube-api-access-fc8f6\") pod \"dnsmasq-dns-5959f8865f-c2nbj\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.765557 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvw6r\" (UniqueName: \"kubernetes.io/projected/7a2d2577-fc5e-4375-8c8f-154aa218707f-kube-api-access-kvw6r\") pod \"barbican-db-sync-tvf2t\" (UID: \"7a2d2577-fc5e-4375-8c8f-154aa218707f\") " pod="openstack/barbican-db-sync-tvf2t" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.792038 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-fzhsp"] Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.794397 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-fzhsp" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.809137 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-scripts\") pod \"cinder-db-sync-cqwq4\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") " pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.809566 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/53776995-5c2b-44a6-bbd2-ff624640c0b3-config\") pod \"neutron-db-sync-x2xgf\" (UID: \"53776995-5c2b-44a6-bbd2-ff624640c0b3\") " pod="openstack/neutron-db-sync-x2xgf" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.809735 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-config-data\") pod \"cinder-db-sync-cqwq4\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") " pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.809788 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8fe7f895-e33e-4159-9dcd-689158d16f22-etc-machine-id\") pod \"cinder-db-sync-cqwq4\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") " pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.809822 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53776995-5c2b-44a6-bbd2-ff624640c0b3-combined-ca-bundle\") pod \"neutron-db-sync-x2xgf\" (UID: \"53776995-5c2b-44a6-bbd2-ff624640c0b3\") " pod="openstack/neutron-db-sync-x2xgf" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.809842 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-combined-ca-bundle\") pod \"cinder-db-sync-cqwq4\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") " pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.809873 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6df584d-65d7-4829-8937-3ac0ab49b71b-config-data\") pod \"cloudkitty-db-sync-w5njq\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.809929 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-db-sync-config-data\") pod \"cinder-db-sync-cqwq4\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") " pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.810040 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/c6df584d-65d7-4829-8937-3ac0ab49b71b-certs\") pod \"cloudkitty-db-sync-w5njq\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.810061 4946 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6df584d-65d7-4829-8937-3ac0ab49b71b-scripts\") pod \"cloudkitty-db-sync-w5njq\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.810148 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhtqm\" (UniqueName: \"kubernetes.io/projected/53776995-5c2b-44a6-bbd2-ff624640c0b3-kube-api-access-zhtqm\") pod \"neutron-db-sync-x2xgf\" (UID: \"53776995-5c2b-44a6-bbd2-ff624640c0b3\") " pod="openstack/neutron-db-sync-x2xgf" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.810221 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6df584d-65d7-4829-8937-3ac0ab49b71b-combined-ca-bundle\") pod \"cloudkitty-db-sync-w5njq\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.810239 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-td52c\" (UniqueName: \"kubernetes.io/projected/c6df584d-65d7-4829-8937-3ac0ab49b71b-kube-api-access-td52c\") pod \"cloudkitty-db-sync-w5njq\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.810322 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bjrd\" (UniqueName: \"kubernetes.io/projected/8fe7f895-e33e-4159-9dcd-689158d16f22-kube-api-access-5bjrd\") pod \"cinder-db-sync-cqwq4\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") " pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.818306 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-scripts\") pod \"cinder-db-sync-cqwq4\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") " pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.819948 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8fe7f895-e33e-4159-9dcd-689158d16f22-etc-machine-id\") pod \"cinder-db-sync-cqwq4\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") " pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.822669 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.824680 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.824796 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-db-sync-config-data\") pod \"cinder-db-sync-cqwq4\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") " pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.824862 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-v798j" Dec 04 15:23:54 crc 
kubenswrapper[4946]: I1204 15:23:54.835201 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-combined-ca-bundle\") pod \"cinder-db-sync-cqwq4\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") " pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.846047 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.850378 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-config-data\") pod \"cinder-db-sync-cqwq4\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") " pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:54 crc kubenswrapper[4946]: I1204 15:23:54.861753 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bjrd\" (UniqueName: \"kubernetes.io/projected/8fe7f895-e33e-4159-9dcd-689158d16f22-kube-api-access-5bjrd\") pod \"cinder-db-sync-cqwq4\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") " pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.872379 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-fzhsp"] Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.898742 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.905320 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-c2nbj"] Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.913384 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhtqm\" (UniqueName: \"kubernetes.io/projected/53776995-5c2b-44a6-bbd2-ff624640c0b3-kube-api-access-zhtqm\") pod \"neutron-db-sync-x2xgf\" (UID: \"53776995-5c2b-44a6-bbd2-ff624640c0b3\") " pod="openstack/neutron-db-sync-x2xgf" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.913467 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dl8qx\" (UniqueName: \"kubernetes.io/projected/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-kube-api-access-dl8qx\") pod \"placement-db-sync-fzhsp\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " pod="openstack/placement-db-sync-fzhsp" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.913504 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6df584d-65d7-4829-8937-3ac0ab49b71b-combined-ca-bundle\") pod \"cloudkitty-db-sync-w5njq\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.913522 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-td52c\" (UniqueName: \"kubernetes.io/projected/c6df584d-65d7-4829-8937-3ac0ab49b71b-kube-api-access-td52c\") pod \"cloudkitty-db-sync-w5njq\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.913554 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-config-data\") pod \"placement-db-sync-fzhsp\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " pod="openstack/placement-db-sync-fzhsp" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.913591 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-combined-ca-bundle\") pod \"placement-db-sync-fzhsp\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " pod="openstack/placement-db-sync-fzhsp" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.913626 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/53776995-5c2b-44a6-bbd2-ff624640c0b3-config\") pod \"neutron-db-sync-x2xgf\" (UID: \"53776995-5c2b-44a6-bbd2-ff624640c0b3\") " pod="openstack/neutron-db-sync-x2xgf" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.913657 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53776995-5c2b-44a6-bbd2-ff624640c0b3-combined-ca-bundle\") pod \"neutron-db-sync-x2xgf\" (UID: \"53776995-5c2b-44a6-bbd2-ff624640c0b3\") " pod="openstack/neutron-db-sync-x2xgf" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.913677 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6df584d-65d7-4829-8937-3ac0ab49b71b-config-data\") pod \"cloudkitty-db-sync-w5njq\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.913695 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-scripts\") pod \"placement-db-sync-fzhsp\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " pod="openstack/placement-db-sync-fzhsp" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.913756 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-logs\") pod \"placement-db-sync-fzhsp\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " pod="openstack/placement-db-sync-fzhsp" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.913811 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/c6df584d-65d7-4829-8937-3ac0ab49b71b-certs\") pod \"cloudkitty-db-sync-w5njq\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.913828 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6df584d-65d7-4829-8937-3ac0ab49b71b-scripts\") pod \"cloudkitty-db-sync-w5njq\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.923788 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6df584d-65d7-4829-8937-3ac0ab49b71b-combined-ca-bundle\") pod \"cloudkitty-db-sync-w5njq\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " 
pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.924429 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/53776995-5c2b-44a6-bbd2-ff624640c0b3-config\") pod \"neutron-db-sync-x2xgf\" (UID: \"53776995-5c2b-44a6-bbd2-ff624640c0b3\") " pod="openstack/neutron-db-sync-x2xgf" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.924488 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6df584d-65d7-4829-8937-3ac0ab49b71b-scripts\") pod \"cloudkitty-db-sync-w5njq\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.927352 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/c6df584d-65d7-4829-8937-3ac0ab49b71b-certs\") pod \"cloudkitty-db-sync-w5njq\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.929092 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6df584d-65d7-4829-8937-3ac0ab49b71b-config-data\") pod \"cloudkitty-db-sync-w5njq\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.941481 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53776995-5c2b-44a6-bbd2-ff624640c0b3-combined-ca-bundle\") pod \"neutron-db-sync-x2xgf\" (UID: \"53776995-5c2b-44a6-bbd2-ff624640c0b3\") " pod="openstack/neutron-db-sync-x2xgf" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.947146 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-gvts4"] Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.949044 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.968375 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhtqm\" (UniqueName: \"kubernetes.io/projected/53776995-5c2b-44a6-bbd2-ff624640c0b3-kube-api-access-zhtqm\") pod \"neutron-db-sync-x2xgf\" (UID: \"53776995-5c2b-44a6-bbd2-ff624640c0b3\") " pod="openstack/neutron-db-sync-x2xgf" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.979827 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-gvts4"] Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.992671 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-td52c\" (UniqueName: \"kubernetes.io/projected/c6df584d-65d7-4829-8937-3ac0ab49b71b-kube-api-access-td52c\") pod \"cloudkitty-db-sync-w5njq\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:54.995230 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.002553 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-tvf2t" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.004260 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.010519 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.014815 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.016726 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.023389 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-combined-ca-bundle\") pod \"placement-db-sync-fzhsp\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " pod="openstack/placement-db-sync-fzhsp" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.023527 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-scripts\") pod \"placement-db-sync-fzhsp\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " pod="openstack/placement-db-sync-fzhsp" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.023614 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsww2\" (UniqueName: \"kubernetes.io/projected/539ee175-1c38-4ed1-8c41-52a5af211b83-kube-api-access-tsww2\") pod \"dnsmasq-dns-58dd9ff6bc-gvts4\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.023684 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-logs\") pod \"placement-db-sync-fzhsp\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " pod="openstack/placement-db-sync-fzhsp" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.023710 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-gvts4\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.023757 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-gvts4\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.023787 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-gvts4\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.023959 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dl8qx\" (UniqueName: \"kubernetes.io/projected/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-kube-api-access-dl8qx\") pod 
\"placement-db-sync-fzhsp\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " pod="openstack/placement-db-sync-fzhsp" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.024022 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-config-data\") pod \"placement-db-sync-fzhsp\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " pod="openstack/placement-db-sync-fzhsp" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.024051 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-gvts4\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.024132 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-config\") pod \"dnsmasq-dns-58dd9ff6bc-gvts4\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.028141 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-logs\") pod \"placement-db-sync-fzhsp\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " pod="openstack/placement-db-sync-fzhsp" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.062638 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-scripts\") pod \"placement-db-sync-fzhsp\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " pod="openstack/placement-db-sync-fzhsp" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.070510 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dl8qx\" (UniqueName: \"kubernetes.io/projected/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-kube-api-access-dl8qx\") pod \"placement-db-sync-fzhsp\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " pod="openstack/placement-db-sync-fzhsp" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.076634 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-config-data\") pod \"placement-db-sync-fzhsp\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " pod="openstack/placement-db-sync-fzhsp" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.090044 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-combined-ca-bundle\") pod \"placement-db-sync-fzhsp\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " pod="openstack/placement-db-sync-fzhsp" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.125813 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-gvts4\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc 
kubenswrapper[4946]: I1204 15:23:55.125882 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.125906 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-gvts4\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.125924 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-gvts4\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.125990 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.126044 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-run-httpd\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.126130 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-log-httpd\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.126165 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-gvts4\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.126188 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-config\") pod \"dnsmasq-dns-58dd9ff6bc-gvts4\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.126278 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-config-data\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.126315 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsww2\" 
(UniqueName: \"kubernetes.io/projected/539ee175-1c38-4ed1-8c41-52a5af211b83-kube-api-access-tsww2\") pod \"dnsmasq-dns-58dd9ff6bc-gvts4\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.126338 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-scripts\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.126355 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5mkk\" (UniqueName: \"kubernetes.io/projected/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-kube-api-access-c5mkk\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.127545 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-gvts4\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.128258 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-gvts4\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.128919 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-gvts4\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.130250 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-mvfks"] Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.130868 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-gvts4\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.131833 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-config\") pod \"dnsmasq-dns-58dd9ff6bc-gvts4\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.156942 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsww2\" (UniqueName: \"kubernetes.io/projected/539ee175-1c38-4ed1-8c41-52a5af211b83-kube-api-access-tsww2\") pod \"dnsmasq-dns-58dd9ff6bc-gvts4\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.168013 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/keystone-bootstrap-fnf9d"] Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.227944 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-config-data\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.228037 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-scripts\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.228060 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5mkk\" (UniqueName: \"kubernetes.io/projected/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-kube-api-access-c5mkk\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.228192 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.228280 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.228341 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-run-httpd\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.228364 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-log-httpd\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.229598 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-log-httpd\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.229660 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-run-httpd\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.232920 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-scripts\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc 
kubenswrapper[4946]: I1204 15:23:55.236166 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-config-data\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.239621 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.243394 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.258797 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5mkk\" (UniqueName: \"kubernetes.io/projected/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-kube-api-access-c5mkk\") pod \"ceilometer-0\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.263282 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-x2xgf" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.281870 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.363318 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-fzhsp" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.396390 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.404421 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.635254 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f877ddd87-mvfks" event={"ID":"56e85ac6-319e-4d28-bef2-10772f98318f","Type":"ContainerStarted","Data":"d3139c7570f56a3e8bd1f8c85db6eacc2ad549eb01483c74b8645e74985919ac"} Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.640823 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f877ddd87-mvfks" event={"ID":"56e85ac6-319e-4d28-bef2-10772f98318f","Type":"ContainerStarted","Data":"ef647afe0afe767981bd5e93c7dee5d322a96347bde23f696c092b01138f927e"} Dec 04 15:23:55 crc kubenswrapper[4946]: I1204 15:23:55.640927 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fnf9d" event={"ID":"4c3653c9-449b-469f-8606-24c51c1cce6d","Type":"ContainerStarted","Data":"411fb98be291fe7d10f3d6bf8ae4981a43cea3e9e1839ab84433c981cd8e541f"} Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.302065 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.379108 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-dns-svc\") pod \"56e85ac6-319e-4d28-bef2-10772f98318f\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.379374 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-ovsdbserver-sb\") pod \"56e85ac6-319e-4d28-bef2-10772f98318f\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.379484 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-config\") pod \"56e85ac6-319e-4d28-bef2-10772f98318f\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.379505 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfgb2\" (UniqueName: \"kubernetes.io/projected/56e85ac6-319e-4d28-bef2-10772f98318f-kube-api-access-qfgb2\") pod \"56e85ac6-319e-4d28-bef2-10772f98318f\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.379542 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-ovsdbserver-nb\") pod \"56e85ac6-319e-4d28-bef2-10772f98318f\" (UID: \"56e85ac6-319e-4d28-bef2-10772f98318f\") " Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.454478 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56e85ac6-319e-4d28-bef2-10772f98318f-kube-api-access-qfgb2" (OuterVolumeSpecName: "kube-api-access-qfgb2") pod "56e85ac6-319e-4d28-bef2-10772f98318f" (UID: "56e85ac6-319e-4d28-bef2-10772f98318f"). InnerVolumeSpecName "kube-api-access-qfgb2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.499739 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfgb2\" (UniqueName: \"kubernetes.io/projected/56e85ac6-319e-4d28-bef2-10772f98318f-kube-api-access-qfgb2\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.560882 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-config" (OuterVolumeSpecName: "config") pod "56e85ac6-319e-4d28-bef2-10772f98318f" (UID: "56e85ac6-319e-4d28-bef2-10772f98318f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.613491 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.634202 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "56e85ac6-319e-4d28-bef2-10772f98318f" (UID: "56e85ac6-319e-4d28-bef2-10772f98318f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.637539 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "56e85ac6-319e-4d28-bef2-10772f98318f" (UID: "56e85ac6-319e-4d28-bef2-10772f98318f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.647363 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "56e85ac6-319e-4d28-bef2-10772f98318f" (UID: "56e85ac6-319e-4d28-bef2-10772f98318f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.682382 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fnf9d" event={"ID":"4c3653c9-449b-469f-8606-24c51c1cce6d","Type":"ContainerStarted","Data":"cb124d636114954f01e8cce63ad55c49fbf55c8d5b57dc02efd225cb45a9b910"} Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.704585 4946 generic.go:334] "Generic (PLEG): container finished" podID="56e85ac6-319e-4d28-bef2-10772f98318f" containerID="d3139c7570f56a3e8bd1f8c85db6eacc2ad549eb01483c74b8645e74985919ac" exitCode=0 Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.704902 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f877ddd87-mvfks" event={"ID":"56e85ac6-319e-4d28-bef2-10772f98318f","Type":"ContainerDied","Data":"d3139c7570f56a3e8bd1f8c85db6eacc2ad549eb01483c74b8645e74985919ac"} Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.705024 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f877ddd87-mvfks" event={"ID":"56e85ac6-319e-4d28-bef2-10772f98318f","Type":"ContainerDied","Data":"ef647afe0afe767981bd5e93c7dee5d322a96347bde23f696c092b01138f927e"} Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.705105 4946 scope.go:117] "RemoveContainer" containerID="d3139c7570f56a3e8bd1f8c85db6eacc2ad549eb01483c74b8645e74985919ac" Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.705459 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-mvfks" Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.710128 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.725185 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.725218 4946 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.725228 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56e85ac6-319e-4d28-bef2-10772f98318f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.758724 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-fnf9d" podStartSLOduration=3.758691736 podStartE2EDuration="3.758691736s" podCreationTimestamp="2025-12-04 15:23:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:23:56.722306279 +0000 UTC m=+1287.608349920" watchObservedRunningTime="2025-12-04 15:23:56.758691736 +0000 UTC m=+1287.644735377" Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.827039 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.896376 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-c2nbj"] Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.906851 4946 scope.go:117] "RemoveContainer" containerID="d3139c7570f56a3e8bd1f8c85db6eacc2ad549eb01483c74b8645e74985919ac" Dec 04 15:23:56 crc kubenswrapper[4946]: E1204 15:23:56.917617 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3139c7570f56a3e8bd1f8c85db6eacc2ad549eb01483c74b8645e74985919ac\": container with ID starting with d3139c7570f56a3e8bd1f8c85db6eacc2ad549eb01483c74b8645e74985919ac not found: ID does not exist" containerID="d3139c7570f56a3e8bd1f8c85db6eacc2ad549eb01483c74b8645e74985919ac" Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.922656 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3139c7570f56a3e8bd1f8c85db6eacc2ad549eb01483c74b8645e74985919ac"} err="failed to get container status \"d3139c7570f56a3e8bd1f8c85db6eacc2ad549eb01483c74b8645e74985919ac\": rpc error: code = NotFound desc = could not find container \"d3139c7570f56a3e8bd1f8c85db6eacc2ad549eb01483c74b8645e74985919ac\": container with ID starting with d3139c7570f56a3e8bd1f8c85db6eacc2ad549eb01483c74b8645e74985919ac not found: ID does not exist" Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.942068 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-cqwq4"] Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.964713 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-mvfks"] Dec 04 15:23:56 crc kubenswrapper[4946]: I1204 15:23:56.984455 4946 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-fzhsp"] Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.004165 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-mvfks"] Dec 04 15:23:57 crc kubenswrapper[4946]: W1204 15:23:57.035078 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ff96dc8_21f8_4417_9d8e_8ee12f04a3ef.slice/crio-58e5b7eb191568a992818308ec7b640b35f355935848eeda9a7b53c21c9b1069 WatchSource:0}: Error finding container 58e5b7eb191568a992818308ec7b640b35f355935848eeda9a7b53c21c9b1069: Status 404 returned error can't find the container with id 58e5b7eb191568a992818308ec7b640b35f355935848eeda9a7b53c21c9b1069 Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.038096 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-w5njq"] Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.087226 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-gvts4"] Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.133402 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-x2xgf"] Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.223410 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-tvf2t"] Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.245179 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.475298 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56e85ac6-319e-4d28-bef2-10772f98318f" path="/var/lib/kubelet/pods/56e85ac6-319e-4d28-bef2-10772f98318f/volumes" Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.722673 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-tvf2t" event={"ID":"7a2d2577-fc5e-4375-8c8f-154aa218707f","Type":"ContainerStarted","Data":"0fba1fe3f01d482c1a37812162141a9e0c628b74d752c5e846727d0db86e926d"} Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.726532 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef","Type":"ContainerStarted","Data":"58e5b7eb191568a992818308ec7b640b35f355935848eeda9a7b53c21c9b1069"} Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.733695 4946 generic.go:334] "Generic (PLEG): container finished" podID="e10e4d14-3642-427f-aaf4-e54ce4e747a0" containerID="1360e8f14ddeea302f912bf5f3baee2c1d0ce835711cd08e0e02c8154a28f513" exitCode=0 Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.733791 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" event={"ID":"e10e4d14-3642-427f-aaf4-e54ce4e747a0","Type":"ContainerDied","Data":"1360e8f14ddeea302f912bf5f3baee2c1d0ce835711cd08e0e02c8154a28f513"} Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.733829 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" event={"ID":"e10e4d14-3642-427f-aaf4-e54ce4e747a0","Type":"ContainerStarted","Data":"21a086232f4e551dfac9c2adc5988ef5eafa323a770fd896a02b8e3b68437077"} Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.740312 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-cqwq4" 
event={"ID":"8fe7f895-e33e-4159-9dcd-689158d16f22","Type":"ContainerStarted","Data":"62bee506dc90864125f6b59ac43c9865b746d3efeeeb0573076a29b2476c8078"} Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.763496 4946 generic.go:334] "Generic (PLEG): container finished" podID="539ee175-1c38-4ed1-8c41-52a5af211b83" containerID="62de0f529dcffe666f65efd385cf1cf20d0482562d18c84bbb69e0e7ca4b8431" exitCode=0 Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.763587 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" event={"ID":"539ee175-1c38-4ed1-8c41-52a5af211b83","Type":"ContainerDied","Data":"62de0f529dcffe666f65efd385cf1cf20d0482562d18c84bbb69e0e7ca4b8431"} Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.763637 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" event={"ID":"539ee175-1c38-4ed1-8c41-52a5af211b83","Type":"ContainerStarted","Data":"1a79a61c7dfb2cd02fa916a2d1fb4bfc420f821293d82cb564a454eee43f66d6"} Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.780576 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-w5njq" event={"ID":"c6df584d-65d7-4829-8937-3ac0ab49b71b","Type":"ContainerStarted","Data":"c5e7a94d74809ddde5b8b8e89b28964dc3e46b551c53d8f9eeed6649226bd260"} Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.802834 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-x2xgf" event={"ID":"53776995-5c2b-44a6-bbd2-ff624640c0b3","Type":"ContainerStarted","Data":"fb873cdca7ad35413129ab3ef76d927e7a73f30b001c4770a1d791971e331b12"} Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.802891 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-x2xgf" event={"ID":"53776995-5c2b-44a6-bbd2-ff624640c0b3","Type":"ContainerStarted","Data":"27a381e7cfad78fedea32fb0214f05a4dbffb13d7984f85974ae63e05333abb1"} Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.810557 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fzhsp" event={"ID":"79b3e881-2a15-43cf-aefa-b0b4dc1f5935","Type":"ContainerStarted","Data":"d0a15eb3c09e6adef35122a665a8e82f62bb0d7ed4986a4463f3463fbe292c58"} Dec 04 15:23:57 crc kubenswrapper[4946]: I1204 15:23:57.842326 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-x2xgf" podStartSLOduration=3.842281363 podStartE2EDuration="3.842281363s" podCreationTimestamp="2025-12-04 15:23:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:23:57.828523796 +0000 UTC m=+1288.714567447" watchObservedRunningTime="2025-12-04 15:23:57.842281363 +0000 UTC m=+1288.728325014" Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.185789 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.295588 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-ovsdbserver-nb\") pod \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.295689 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-ovsdbserver-sb\") pod \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.295731 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fc8f6\" (UniqueName: \"kubernetes.io/projected/e10e4d14-3642-427f-aaf4-e54ce4e747a0-kube-api-access-fc8f6\") pod \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.295780 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-dns-svc\") pod \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.296049 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-config\") pod \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.296130 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-dns-swift-storage-0\") pod \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\" (UID: \"e10e4d14-3642-427f-aaf4-e54ce4e747a0\") " Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.334581 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e10e4d14-3642-427f-aaf4-e54ce4e747a0" (UID: "e10e4d14-3642-427f-aaf4-e54ce4e747a0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.338202 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-config" (OuterVolumeSpecName: "config") pod "e10e4d14-3642-427f-aaf4-e54ce4e747a0" (UID: "e10e4d14-3642-427f-aaf4-e54ce4e747a0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.350381 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e10e4d14-3642-427f-aaf4-e54ce4e747a0-kube-api-access-fc8f6" (OuterVolumeSpecName: "kube-api-access-fc8f6") pod "e10e4d14-3642-427f-aaf4-e54ce4e747a0" (UID: "e10e4d14-3642-427f-aaf4-e54ce4e747a0"). InnerVolumeSpecName "kube-api-access-fc8f6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.359798 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e10e4d14-3642-427f-aaf4-e54ce4e747a0" (UID: "e10e4d14-3642-427f-aaf4-e54ce4e747a0"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.382494 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e10e4d14-3642-427f-aaf4-e54ce4e747a0" (UID: "e10e4d14-3642-427f-aaf4-e54ce4e747a0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.388888 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e10e4d14-3642-427f-aaf4-e54ce4e747a0" (UID: "e10e4d14-3642-427f-aaf4-e54ce4e747a0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.403089 4946 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.403188 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.403204 4946 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.403220 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.403231 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e10e4d14-3642-427f-aaf4-e54ce4e747a0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.403240 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fc8f6\" (UniqueName: \"kubernetes.io/projected/e10e4d14-3642-427f-aaf4-e54ce4e747a0-kube-api-access-fc8f6\") on node \"crc\" DevicePath \"\"" Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.854179 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" event={"ID":"e10e4d14-3642-427f-aaf4-e54ce4e747a0","Type":"ContainerDied","Data":"21a086232f4e551dfac9c2adc5988ef5eafa323a770fd896a02b8e3b68437077"} Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.854245 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-c2nbj" Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.854253 4946 scope.go:117] "RemoveContainer" containerID="1360e8f14ddeea302f912bf5f3baee2c1d0ce835711cd08e0e02c8154a28f513" Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.867462 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" event={"ID":"539ee175-1c38-4ed1-8c41-52a5af211b83","Type":"ContainerStarted","Data":"2af8a4bb0fe3d5278ad3453c532e22348c421c345a099eea9a176bef5f133930"} Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.867966 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.921200 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" podStartSLOduration=4.921165301 podStartE2EDuration="4.921165301s" podCreationTimestamp="2025-12-04 15:23:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:23:58.90578959 +0000 UTC m=+1289.791833251" watchObservedRunningTime="2025-12-04 15:23:58.921165301 +0000 UTC m=+1289.807208962" Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.973634 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-c2nbj"] Dec 04 15:23:58 crc kubenswrapper[4946]: I1204 15:23:58.982365 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-c2nbj"] Dec 04 15:23:59 crc kubenswrapper[4946]: I1204 15:23:59.480701 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e10e4d14-3642-427f-aaf4-e54ce4e747a0" path="/var/lib/kubelet/pods/e10e4d14-3642-427f-aaf4-e54ce4e747a0/volumes" Dec 04 15:24:01 crc kubenswrapper[4946]: I1204 15:24:01.941084 4946 generic.go:334] "Generic (PLEG): container finished" podID="4c3653c9-449b-469f-8606-24c51c1cce6d" containerID="cb124d636114954f01e8cce63ad55c49fbf55c8d5b57dc02efd225cb45a9b910" exitCode=0 Dec 04 15:24:01 crc kubenswrapper[4946]: I1204 15:24:01.941198 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fnf9d" event={"ID":"4c3653c9-449b-469f-8606-24c51c1cce6d","Type":"ContainerDied","Data":"cb124d636114954f01e8cce63ad55c49fbf55c8d5b57dc02efd225cb45a9b910"} Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.498720 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.567659 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-combined-ca-bundle\") pod \"4c3653c9-449b-469f-8606-24c51c1cce6d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.567914 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-scripts\") pod \"4c3653c9-449b-469f-8606-24c51c1cce6d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.568076 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-config-data\") pod \"4c3653c9-449b-469f-8606-24c51c1cce6d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.568127 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-fernet-keys\") pod \"4c3653c9-449b-469f-8606-24c51c1cce6d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.568231 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-credential-keys\") pod \"4c3653c9-449b-469f-8606-24c51c1cce6d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.568426 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zsjg\" (UniqueName: \"kubernetes.io/projected/4c3653c9-449b-469f-8606-24c51c1cce6d-kube-api-access-4zsjg\") pod \"4c3653c9-449b-469f-8606-24c51c1cce6d\" (UID: \"4c3653c9-449b-469f-8606-24c51c1cce6d\") " Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.577288 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "4c3653c9-449b-469f-8606-24c51c1cce6d" (UID: "4c3653c9-449b-469f-8606-24c51c1cce6d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.577792 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-scripts" (OuterVolumeSpecName: "scripts") pod "4c3653c9-449b-469f-8606-24c51c1cce6d" (UID: "4c3653c9-449b-469f-8606-24c51c1cce6d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.580441 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c3653c9-449b-469f-8606-24c51c1cce6d-kube-api-access-4zsjg" (OuterVolumeSpecName: "kube-api-access-4zsjg") pod "4c3653c9-449b-469f-8606-24c51c1cce6d" (UID: "4c3653c9-449b-469f-8606-24c51c1cce6d"). InnerVolumeSpecName "kube-api-access-4zsjg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.580828 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "4c3653c9-449b-469f-8606-24c51c1cce6d" (UID: "4c3653c9-449b-469f-8606-24c51c1cce6d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.613776 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-config-data" (OuterVolumeSpecName: "config-data") pod "4c3653c9-449b-469f-8606-24c51c1cce6d" (UID: "4c3653c9-449b-469f-8606-24c51c1cce6d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.614475 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4c3653c9-449b-469f-8606-24c51c1cce6d" (UID: "4c3653c9-449b-469f-8606-24c51c1cce6d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.673147 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zsjg\" (UniqueName: \"kubernetes.io/projected/4c3653c9-449b-469f-8606-24c51c1cce6d-kube-api-access-4zsjg\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.673182 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.673192 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.673201 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.673211 4946 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.673223 4946 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4c3653c9-449b-469f-8606-24c51c1cce6d-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.970609 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-fnf9d" event={"ID":"4c3653c9-449b-469f-8606-24c51c1cce6d","Type":"ContainerDied","Data":"411fb98be291fe7d10f3d6bf8ae4981a43cea3e9e1839ab84433c981cd8e541f"} Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.970669 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="411fb98be291fe7d10f3d6bf8ae4981a43cea3e9e1839ab84433c981cd8e541f" Dec 04 15:24:03 crc kubenswrapper[4946]: I1204 15:24:03.970748 4946 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-fnf9d" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.165194 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-fnf9d"] Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.176131 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-fnf9d"] Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.327229 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-q4qx6"] Dec 04 15:24:04 crc kubenswrapper[4946]: E1204 15:24:04.328053 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c3653c9-449b-469f-8606-24c51c1cce6d" containerName="keystone-bootstrap" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.330457 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c3653c9-449b-469f-8606-24c51c1cce6d" containerName="keystone-bootstrap" Dec 04 15:24:04 crc kubenswrapper[4946]: E1204 15:24:04.330648 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e10e4d14-3642-427f-aaf4-e54ce4e747a0" containerName="init" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.330662 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e10e4d14-3642-427f-aaf4-e54ce4e747a0" containerName="init" Dec 04 15:24:04 crc kubenswrapper[4946]: E1204 15:24:04.330686 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e85ac6-319e-4d28-bef2-10772f98318f" containerName="init" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.330695 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e85ac6-319e-4d28-bef2-10772f98318f" containerName="init" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.331376 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="e10e4d14-3642-427f-aaf4-e54ce4e747a0" containerName="init" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.331419 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c3653c9-449b-469f-8606-24c51c1cce6d" containerName="keystone-bootstrap" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.331436 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="56e85ac6-319e-4d28-bef2-10772f98318f" containerName="init" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.333087 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.338199 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.338818 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.338981 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-d8v6l" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.339605 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.350250 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-q4qx6"] Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.388415 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-combined-ca-bundle\") pod \"keystone-bootstrap-q4qx6\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.388484 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-config-data\") pod \"keystone-bootstrap-q4qx6\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.388557 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-scripts\") pod \"keystone-bootstrap-q4qx6\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.388605 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-credential-keys\") pod \"keystone-bootstrap-q4qx6\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.388721 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-fernet-keys\") pod \"keystone-bootstrap-q4qx6\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.388740 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x6t4\" (UniqueName: \"kubernetes.io/projected/c94e50af-9ae2-4ed6-a351-ccff8209cd55-kube-api-access-2x6t4\") pod \"keystone-bootstrap-q4qx6\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.490639 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-scripts\") pod \"keystone-bootstrap-q4qx6\" (UID: 
\"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.490747 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-credential-keys\") pod \"keystone-bootstrap-q4qx6\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.490864 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-fernet-keys\") pod \"keystone-bootstrap-q4qx6\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.490889 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x6t4\" (UniqueName: \"kubernetes.io/projected/c94e50af-9ae2-4ed6-a351-ccff8209cd55-kube-api-access-2x6t4\") pod \"keystone-bootstrap-q4qx6\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.490946 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-combined-ca-bundle\") pod \"keystone-bootstrap-q4qx6\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.490975 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-config-data\") pod \"keystone-bootstrap-q4qx6\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.496553 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-config-data\") pod \"keystone-bootstrap-q4qx6\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.497319 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-scripts\") pod \"keystone-bootstrap-q4qx6\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.497709 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-combined-ca-bundle\") pod \"keystone-bootstrap-q4qx6\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.498842 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-fernet-keys\") pod \"keystone-bootstrap-q4qx6\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.510097 4946 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2x6t4\" (UniqueName: \"kubernetes.io/projected/c94e50af-9ae2-4ed6-a351-ccff8209cd55-kube-api-access-2x6t4\") pod \"keystone-bootstrap-q4qx6\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.511809 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-credential-keys\") pod \"keystone-bootstrap-q4qx6\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:04 crc kubenswrapper[4946]: I1204 15:24:04.665279 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:05 crc kubenswrapper[4946]: I1204 15:24:05.408021 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:24:05 crc kubenswrapper[4946]: I1204 15:24:05.492281 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c3653c9-449b-469f-8606-24c51c1cce6d" path="/var/lib/kubelet/pods/4c3653c9-449b-469f-8606-24c51c1cce6d/volumes" Dec 04 15:24:05 crc kubenswrapper[4946]: I1204 15:24:05.531149 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-q8nbg"] Dec 04 15:24:05 crc kubenswrapper[4946]: I1204 15:24:05.531537 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-q8nbg" podUID="b3d07afb-2b13-4962-a233-2fc779f21f68" containerName="dnsmasq-dns" containerID="cri-o://cf3a54599d48a3aa5ac927ea96645e60a0b407ae37ce540b2dcc44382f2c8032" gracePeriod=10 Dec 04 15:24:06 crc kubenswrapper[4946]: I1204 15:24:06.008880 4946 generic.go:334] "Generic (PLEG): container finished" podID="b3d07afb-2b13-4962-a233-2fc779f21f68" containerID="cf3a54599d48a3aa5ac927ea96645e60a0b407ae37ce540b2dcc44382f2c8032" exitCode=0 Dec 04 15:24:06 crc kubenswrapper[4946]: I1204 15:24:06.008969 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-q8nbg" event={"ID":"b3d07afb-2b13-4962-a233-2fc779f21f68","Type":"ContainerDied","Data":"cf3a54599d48a3aa5ac927ea96645e60a0b407ae37ce540b2dcc44382f2c8032"} Dec 04 15:24:06 crc kubenswrapper[4946]: I1204 15:24:06.012884 4946 generic.go:334] "Generic (PLEG): container finished" podID="2a4e74d1-f18d-4356-be6d-10171056d511" containerID="cef4183916354fd644469fc88df61c2c5b305125508468352a41d6b663967ffd" exitCode=0 Dec 04 15:24:06 crc kubenswrapper[4946]: I1204 15:24:06.012952 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-tb5mp" event={"ID":"2a4e74d1-f18d-4356-be6d-10171056d511","Type":"ContainerDied","Data":"cef4183916354fd644469fc88df61c2c5b305125508468352a41d6b663967ffd"} Dec 04 15:24:06 crc kubenswrapper[4946]: I1204 15:24:06.827753 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 04 15:24:06 crc kubenswrapper[4946]: I1204 15:24:06.836229 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 04 15:24:07 crc kubenswrapper[4946]: I1204 15:24:07.029131 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 04 15:24:07 crc kubenswrapper[4946]: I1204 15:24:07.699382 4946 prober.go:107] 
"Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-q8nbg" podUID="b3d07afb-2b13-4962-a233-2fc779f21f68" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: connect: connection refused" Dec 04 15:24:12 crc kubenswrapper[4946]: I1204 15:24:12.700465 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-q8nbg" podUID="b3d07afb-2b13-4962-a233-2fc779f21f68" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: connect: connection refused" Dec 04 15:24:17 crc kubenswrapper[4946]: I1204 15:24:17.700236 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-q8nbg" podUID="b3d07afb-2b13-4962-a233-2fc779f21f68" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: connect: connection refused" Dec 04 15:24:17 crc kubenswrapper[4946]: I1204 15:24:17.701060 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:24:18 crc kubenswrapper[4946]: I1204 15:24:18.553355 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-tb5mp" Dec 04 15:24:18 crc kubenswrapper[4946]: I1204 15:24:18.665588 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2l9z9\" (UniqueName: \"kubernetes.io/projected/2a4e74d1-f18d-4356-be6d-10171056d511-kube-api-access-2l9z9\") pod \"2a4e74d1-f18d-4356-be6d-10171056d511\" (UID: \"2a4e74d1-f18d-4356-be6d-10171056d511\") " Dec 04 15:24:18 crc kubenswrapper[4946]: I1204 15:24:18.665665 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a4e74d1-f18d-4356-be6d-10171056d511-config-data\") pod \"2a4e74d1-f18d-4356-be6d-10171056d511\" (UID: \"2a4e74d1-f18d-4356-be6d-10171056d511\") " Dec 04 15:24:18 crc kubenswrapper[4946]: I1204 15:24:18.665854 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a4e74d1-f18d-4356-be6d-10171056d511-combined-ca-bundle\") pod \"2a4e74d1-f18d-4356-be6d-10171056d511\" (UID: \"2a4e74d1-f18d-4356-be6d-10171056d511\") " Dec 04 15:24:18 crc kubenswrapper[4946]: I1204 15:24:18.666050 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2a4e74d1-f18d-4356-be6d-10171056d511-db-sync-config-data\") pod \"2a4e74d1-f18d-4356-be6d-10171056d511\" (UID: \"2a4e74d1-f18d-4356-be6d-10171056d511\") " Dec 04 15:24:18 crc kubenswrapper[4946]: I1204 15:24:18.679850 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a4e74d1-f18d-4356-be6d-10171056d511-kube-api-access-2l9z9" (OuterVolumeSpecName: "kube-api-access-2l9z9") pod "2a4e74d1-f18d-4356-be6d-10171056d511" (UID: "2a4e74d1-f18d-4356-be6d-10171056d511"). InnerVolumeSpecName "kube-api-access-2l9z9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:24:18 crc kubenswrapper[4946]: I1204 15:24:18.680815 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a4e74d1-f18d-4356-be6d-10171056d511-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "2a4e74d1-f18d-4356-be6d-10171056d511" (UID: "2a4e74d1-f18d-4356-be6d-10171056d511"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:18 crc kubenswrapper[4946]: I1204 15:24:18.707666 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a4e74d1-f18d-4356-be6d-10171056d511-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2a4e74d1-f18d-4356-be6d-10171056d511" (UID: "2a4e74d1-f18d-4356-be6d-10171056d511"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:18 crc kubenswrapper[4946]: I1204 15:24:18.733579 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a4e74d1-f18d-4356-be6d-10171056d511-config-data" (OuterVolumeSpecName: "config-data") pod "2a4e74d1-f18d-4356-be6d-10171056d511" (UID: "2a4e74d1-f18d-4356-be6d-10171056d511"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:18 crc kubenswrapper[4946]: I1204 15:24:18.769204 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2l9z9\" (UniqueName: \"kubernetes.io/projected/2a4e74d1-f18d-4356-be6d-10171056d511-kube-api-access-2l9z9\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:18 crc kubenswrapper[4946]: I1204 15:24:18.769250 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a4e74d1-f18d-4356-be6d-10171056d511-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:18 crc kubenswrapper[4946]: I1204 15:24:18.769261 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a4e74d1-f18d-4356-be6d-10171056d511-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:18 crc kubenswrapper[4946]: I1204 15:24:18.769270 4946 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2a4e74d1-f18d-4356-be6d-10171056d511-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:19 crc kubenswrapper[4946]: I1204 15:24:19.165847 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-tb5mp" event={"ID":"2a4e74d1-f18d-4356-be6d-10171056d511","Type":"ContainerDied","Data":"8386bca653e69858b62b0586af7f81ab948998fea9477cdecb0ba14f2e8070fe"} Dec 04 15:24:19 crc kubenswrapper[4946]: I1204 15:24:19.166196 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8386bca653e69858b62b0586af7f81ab948998fea9477cdecb0ba14f2e8070fe" Dec 04 15:24:19 crc kubenswrapper[4946]: I1204 15:24:19.166009 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-tb5mp" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.142976 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-lvqp5"] Dec 04 15:24:20 crc kubenswrapper[4946]: E1204 15:24:20.144258 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a4e74d1-f18d-4356-be6d-10171056d511" containerName="glance-db-sync" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.144286 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a4e74d1-f18d-4356-be6d-10171056d511" containerName="glance-db-sync" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.145561 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a4e74d1-f18d-4356-be6d-10171056d511" containerName="glance-db-sync" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.150941 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.192265 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-lvqp5"] Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.224003 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-lvqp5\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.224107 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-lvqp5\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.224138 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-lvqp5\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.224160 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-config\") pod \"dnsmasq-dns-785d8bcb8c-lvqp5\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.224191 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ksps\" (UniqueName: \"kubernetes.io/projected/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-kube-api-access-2ksps\") pod \"dnsmasq-dns-785d8bcb8c-lvqp5\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.224263 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-lvqp5\" (UID: 
\"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.326378 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-lvqp5\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.326519 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-config\") pod \"dnsmasq-dns-785d8bcb8c-lvqp5\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.326547 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-lvqp5\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.326567 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-lvqp5\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.326600 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ksps\" (UniqueName: \"kubernetes.io/projected/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-kube-api-access-2ksps\") pod \"dnsmasq-dns-785d8bcb8c-lvqp5\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.326710 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-lvqp5\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.330018 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-lvqp5\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.334024 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-config\") pod \"dnsmasq-dns-785d8bcb8c-lvqp5\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.335944 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-lvqp5\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" 
Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.336451 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-lvqp5\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.338563 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-lvqp5\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.351614 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ksps\" (UniqueName: \"kubernetes.io/projected/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-kube-api-access-2ksps\") pod \"dnsmasq-dns-785d8bcb8c-lvqp5\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.506557 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.993380 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 15:24:20 crc kubenswrapper[4946]: I1204 15:24:20.998671 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.002627 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-w4thl" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.002652 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.002902 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.014904 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.143754 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a13d6013-a7ee-49b5-be0d-b9c41b531916-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.143852 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a13d6013-a7ee-49b5-be0d-b9c41b531916-logs\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.143882 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a13d6013-a7ee-49b5-be0d-b9c41b531916-config-data\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 
15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.143941 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a13d6013-a7ee-49b5-be0d-b9c41b531916-scripts\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.143973 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c2443261-ac2f-492a-9ba5-293baae6928a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.143998 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pf9g\" (UniqueName: \"kubernetes.io/projected/a13d6013-a7ee-49b5-be0d-b9c41b531916-kube-api-access-6pf9g\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.144030 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a13d6013-a7ee-49b5-be0d-b9c41b531916-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.246327 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a13d6013-a7ee-49b5-be0d-b9c41b531916-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.246439 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a13d6013-a7ee-49b5-be0d-b9c41b531916-logs\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.246471 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a13d6013-a7ee-49b5-be0d-b9c41b531916-config-data\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.246544 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a13d6013-a7ee-49b5-be0d-b9c41b531916-scripts\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.246582 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c2443261-ac2f-492a-9ba5-293baae6928a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a\") pod \"glance-default-external-api-0\" (UID: 
\"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.246616 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pf9g\" (UniqueName: \"kubernetes.io/projected/a13d6013-a7ee-49b5-be0d-b9c41b531916-kube-api-access-6pf9g\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.246656 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a13d6013-a7ee-49b5-be0d-b9c41b531916-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.246912 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a13d6013-a7ee-49b5-be0d-b9c41b531916-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.247338 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a13d6013-a7ee-49b5-be0d-b9c41b531916-logs\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.253785 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a13d6013-a7ee-49b5-be0d-b9c41b531916-scripts\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.254820 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.254861 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c2443261-ac2f-492a-9ba5-293baae6928a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f3260bbd2cdb28f5793a9d1edc63c254c747b9e66cce69dc3f280fc78b1b134b/globalmount\"" pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.255448 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a13d6013-a7ee-49b5-be0d-b9c41b531916-config-data\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.256326 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a13d6013-a7ee-49b5-be0d-b9c41b531916-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.269554 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pf9g\" (UniqueName: \"kubernetes.io/projected/a13d6013-a7ee-49b5-be0d-b9c41b531916-kube-api-access-6pf9g\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.304973 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c2443261-ac2f-492a-9ba5-293baae6928a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a\") pod \"glance-default-external-api-0\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.327839 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.333186 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.335405 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.342029 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.344769 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.457380 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81c04277-0e34-46c8-b207-f4988304e238-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.457980 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81c04277-0e34-46c8-b207-f4988304e238-config-data\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.458129 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81c04277-0e34-46c8-b207-f4988304e238-scripts\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.458248 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gd8j\" (UniqueName: \"kubernetes.io/projected/81c04277-0e34-46c8-b207-f4988304e238-kube-api-access-4gd8j\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.458388 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/81c04277-0e34-46c8-b207-f4988304e238-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.458591 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.458746 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81c04277-0e34-46c8-b207-f4988304e238-logs\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.560613 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81c04277-0e34-46c8-b207-f4988304e238-combined-ca-bundle\") 
pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.560714 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81c04277-0e34-46c8-b207-f4988304e238-config-data\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.560745 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81c04277-0e34-46c8-b207-f4988304e238-scripts\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.560787 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gd8j\" (UniqueName: \"kubernetes.io/projected/81c04277-0e34-46c8-b207-f4988304e238-kube-api-access-4gd8j\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.560825 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/81c04277-0e34-46c8-b207-f4988304e238-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.560898 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.560956 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81c04277-0e34-46c8-b207-f4988304e238-logs\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.561495 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81c04277-0e34-46c8-b207-f4988304e238-logs\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.565073 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/81c04277-0e34-46c8-b207-f4988304e238-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.569479 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81c04277-0e34-46c8-b207-f4988304e238-scripts\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " 
pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.571236 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81c04277-0e34-46c8-b207-f4988304e238-config-data\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.573955 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.574018 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d80d3a2b37ec7093e1c47ec6e0b9eb3b02741300f62422d1aa8919218995c41a/globalmount\"" pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.583780 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81c04277-0e34-46c8-b207-f4988304e238-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.592043 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gd8j\" (UniqueName: \"kubernetes.io/projected/81c04277-0e34-46c8-b207-f4988304e238-kube-api-access-4gd8j\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.628746 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\") pod \"glance-default-internal-api-0\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:21 crc kubenswrapper[4946]: I1204 15:24:21.704610 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 04 15:24:23 crc kubenswrapper[4946]: I1204 15:24:23.543145 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 15:24:23 crc kubenswrapper[4946]: I1204 15:24:23.625346 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 15:24:27 crc kubenswrapper[4946]: I1204 15:24:27.700427 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-q8nbg" podUID="b3d07afb-2b13-4962-a233-2fc779f21f68" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: i/o timeout" Dec 04 15:24:28 crc kubenswrapper[4946]: I1204 15:24:28.391473 4946 generic.go:334] "Generic (PLEG): container finished" podID="53776995-5c2b-44a6-bbd2-ff624640c0b3" containerID="fb873cdca7ad35413129ab3ef76d927e7a73f30b001c4770a1d791971e331b12" exitCode=0 Dec 04 15:24:28 crc kubenswrapper[4946]: I1204 15:24:28.391535 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-x2xgf" event={"ID":"53776995-5c2b-44a6-bbd2-ff624640c0b3","Type":"ContainerDied","Data":"fb873cdca7ad35413129ab3ef76d927e7a73f30b001c4770a1d791971e331b12"} Dec 04 15:24:31 crc kubenswrapper[4946]: E1204 15:24:31.300799 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 04 15:24:31 crc kubenswrapper[4946]: E1204 15:24:31.301817 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kvw6r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-tvf2t_openstack(7a2d2577-fc5e-4375-8c8f-154aa218707f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 15:24:31 crc kubenswrapper[4946]: E1204 
15:24:31.302947 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-tvf2t" podUID="7a2d2577-fc5e-4375-8c8f-154aa218707f" Dec 04 15:24:31 crc kubenswrapper[4946]: E1204 15:24:31.437885 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-tvf2t" podUID="7a2d2577-fc5e-4375-8c8f-154aa218707f" Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.798279 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-x2xgf" Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.809692 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.818309 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhtqm\" (UniqueName: \"kubernetes.io/projected/53776995-5c2b-44a6-bbd2-ff624640c0b3-kube-api-access-zhtqm\") pod \"53776995-5c2b-44a6-bbd2-ff624640c0b3\" (UID: \"53776995-5c2b-44a6-bbd2-ff624640c0b3\") " Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.818458 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/53776995-5c2b-44a6-bbd2-ff624640c0b3-config\") pod \"53776995-5c2b-44a6-bbd2-ff624640c0b3\" (UID: \"53776995-5c2b-44a6-bbd2-ff624640c0b3\") " Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.818563 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53776995-5c2b-44a6-bbd2-ff624640c0b3-combined-ca-bundle\") pod \"53776995-5c2b-44a6-bbd2-ff624640c0b3\" (UID: \"53776995-5c2b-44a6-bbd2-ff624640c0b3\") " Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.845396 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53776995-5c2b-44a6-bbd2-ff624640c0b3-kube-api-access-zhtqm" (OuterVolumeSpecName: "kube-api-access-zhtqm") pod "53776995-5c2b-44a6-bbd2-ff624640c0b3" (UID: "53776995-5c2b-44a6-bbd2-ff624640c0b3"). InnerVolumeSpecName "kube-api-access-zhtqm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.861872 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53776995-5c2b-44a6-bbd2-ff624640c0b3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "53776995-5c2b-44a6-bbd2-ff624640c0b3" (UID: "53776995-5c2b-44a6-bbd2-ff624640c0b3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.890603 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53776995-5c2b-44a6-bbd2-ff624640c0b3-config" (OuterVolumeSpecName: "config") pod "53776995-5c2b-44a6-bbd2-ff624640c0b3" (UID: "53776995-5c2b-44a6-bbd2-ff624640c0b3"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.921090 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzd8c\" (UniqueName: \"kubernetes.io/projected/b3d07afb-2b13-4962-a233-2fc779f21f68-kube-api-access-dzd8c\") pod \"b3d07afb-2b13-4962-a233-2fc779f21f68\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.921286 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-config\") pod \"b3d07afb-2b13-4962-a233-2fc779f21f68\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.921442 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-ovsdbserver-sb\") pod \"b3d07afb-2b13-4962-a233-2fc779f21f68\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.921481 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-dns-svc\") pod \"b3d07afb-2b13-4962-a233-2fc779f21f68\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.921558 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-ovsdbserver-nb\") pod \"b3d07afb-2b13-4962-a233-2fc779f21f68\" (UID: \"b3d07afb-2b13-4962-a233-2fc779f21f68\") " Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.922448 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhtqm\" (UniqueName: \"kubernetes.io/projected/53776995-5c2b-44a6-bbd2-ff624640c0b3-kube-api-access-zhtqm\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.922472 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/53776995-5c2b-44a6-bbd2-ff624640c0b3-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.922485 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53776995-5c2b-44a6-bbd2-ff624640c0b3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.932508 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3d07afb-2b13-4962-a233-2fc779f21f68-kube-api-access-dzd8c" (OuterVolumeSpecName: "kube-api-access-dzd8c") pod "b3d07afb-2b13-4962-a233-2fc779f21f68" (UID: "b3d07afb-2b13-4962-a233-2fc779f21f68"). InnerVolumeSpecName "kube-api-access-dzd8c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.975331 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-config" (OuterVolumeSpecName: "config") pod "b3d07afb-2b13-4962-a233-2fc779f21f68" (UID: "b3d07afb-2b13-4962-a233-2fc779f21f68"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.977040 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b3d07afb-2b13-4962-a233-2fc779f21f68" (UID: "b3d07afb-2b13-4962-a233-2fc779f21f68"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:24:31 crc kubenswrapper[4946]: I1204 15:24:31.986915 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b3d07afb-2b13-4962-a233-2fc779f21f68" (UID: "b3d07afb-2b13-4962-a233-2fc779f21f68"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:24:32 crc kubenswrapper[4946]: I1204 15:24:32.000890 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b3d07afb-2b13-4962-a233-2fc779f21f68" (UID: "b3d07afb-2b13-4962-a233-2fc779f21f68"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:24:32 crc kubenswrapper[4946]: I1204 15:24:32.024366 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:32 crc kubenswrapper[4946]: I1204 15:24:32.024420 4946 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:32 crc kubenswrapper[4946]: I1204 15:24:32.024431 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:32 crc kubenswrapper[4946]: I1204 15:24:32.024443 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzd8c\" (UniqueName: \"kubernetes.io/projected/b3d07afb-2b13-4962-a233-2fc779f21f68-kube-api-access-dzd8c\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:32 crc kubenswrapper[4946]: I1204 15:24:32.024456 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3d07afb-2b13-4962-a233-2fc779f21f68-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:32 crc kubenswrapper[4946]: I1204 15:24:32.448990 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-q8nbg" Dec 04 15:24:32 crc kubenswrapper[4946]: I1204 15:24:32.448992 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-q8nbg" event={"ID":"b3d07afb-2b13-4962-a233-2fc779f21f68","Type":"ContainerDied","Data":"f7acbf6b1a34060f11c771df28cd2280b07f824ca149417a128b764939aefba4"} Dec 04 15:24:32 crc kubenswrapper[4946]: I1204 15:24:32.449592 4946 scope.go:117] "RemoveContainer" containerID="cf3a54599d48a3aa5ac927ea96645e60a0b407ae37ce540b2dcc44382f2c8032" Dec 04 15:24:32 crc kubenswrapper[4946]: I1204 15:24:32.451232 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-x2xgf" event={"ID":"53776995-5c2b-44a6-bbd2-ff624640c0b3","Type":"ContainerDied","Data":"27a381e7cfad78fedea32fb0214f05a4dbffb13d7984f85974ae63e05333abb1"} Dec 04 15:24:32 crc kubenswrapper[4946]: I1204 15:24:32.451288 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27a381e7cfad78fedea32fb0214f05a4dbffb13d7984f85974ae63e05333abb1" Dec 04 15:24:32 crc kubenswrapper[4946]: I1204 15:24:32.451366 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-x2xgf" Dec 04 15:24:32 crc kubenswrapper[4946]: I1204 15:24:32.514938 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-q8nbg"] Dec 04 15:24:32 crc kubenswrapper[4946]: I1204 15:24:32.528083 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-q8nbg"] Dec 04 15:24:32 crc kubenswrapper[4946]: I1204 15:24:32.702067 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-q8nbg" podUID="b3d07afb-2b13-4962-a233-2fc779f21f68" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: i/o timeout" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.136251 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-lvqp5"] Dec 04 15:24:33 crc kubenswrapper[4946]: E1204 15:24:33.173426 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 04 15:24:33 crc kubenswrapper[4946]: E1204 15:24:33.173690 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5bjrd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-cqwq4_openstack(8fe7f895-e33e-4159-9dcd-689158d16f22): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 15:24:33 crc kubenswrapper[4946]: E1204 15:24:33.175597 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-cqwq4" podUID="8fe7f895-e33e-4159-9dcd-689158d16f22" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.185132 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-vpcnk"] Dec 04 15:24:33 crc kubenswrapper[4946]: E1204 15:24:33.187583 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3d07afb-2b13-4962-a233-2fc779f21f68" containerName="init" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.187629 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3d07afb-2b13-4962-a233-2fc779f21f68" containerName="init" Dec 04 15:24:33 crc kubenswrapper[4946]: E1204 15:24:33.187655 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53776995-5c2b-44a6-bbd2-ff624640c0b3" containerName="neutron-db-sync" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.187662 4946 
state_mem.go:107] "Deleted CPUSet assignment" podUID="53776995-5c2b-44a6-bbd2-ff624640c0b3" containerName="neutron-db-sync" Dec 04 15:24:33 crc kubenswrapper[4946]: E1204 15:24:33.187677 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3d07afb-2b13-4962-a233-2fc779f21f68" containerName="dnsmasq-dns" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.187683 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3d07afb-2b13-4962-a233-2fc779f21f68" containerName="dnsmasq-dns" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.187951 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="53776995-5c2b-44a6-bbd2-ff624640c0b3" containerName="neutron-db-sync" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.187971 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3d07afb-2b13-4962-a233-2fc779f21f68" containerName="dnsmasq-dns" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.190381 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.212593 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-vpcnk"] Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.273958 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6bf8c9bfd6-lcqwj"] Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.281246 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.282611 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvpmd\" (UniqueName: \"kubernetes.io/projected/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-kube-api-access-kvpmd\") pod \"dnsmasq-dns-55f844cf75-vpcnk\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.282739 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-vpcnk\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.282893 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-config\") pod \"dnsmasq-dns-55f844cf75-vpcnk\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.282940 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-vpcnk\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.283127 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-vpcnk\" 
(UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.283314 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-dns-svc\") pod \"dnsmasq-dns-55f844cf75-vpcnk\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.285031 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.285285 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.285465 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.285617 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-mjj4j" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.295979 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6bf8c9bfd6-lcqwj"] Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.385658 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-vpcnk\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.385754 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-config\") pod \"neutron-6bf8c9bfd6-lcqwj\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.385840 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-httpd-config\") pod \"neutron-6bf8c9bfd6-lcqwj\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.385922 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmkqd\" (UniqueName: \"kubernetes.io/projected/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-kube-api-access-gmkqd\") pod \"neutron-6bf8c9bfd6-lcqwj\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.385979 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-config\") pod \"dnsmasq-dns-55f844cf75-vpcnk\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.385997 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-ovsdbserver-sb\") pod 
\"dnsmasq-dns-55f844cf75-vpcnk\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.386042 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-combined-ca-bundle\") pod \"neutron-6bf8c9bfd6-lcqwj\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.386881 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-vpcnk\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.386879 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-vpcnk\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.386947 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-vpcnk\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.386975 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-dns-svc\") pod \"dnsmasq-dns-55f844cf75-vpcnk\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.387005 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-ovndb-tls-certs\") pod \"neutron-6bf8c9bfd6-lcqwj\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.387063 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvpmd\" (UniqueName: \"kubernetes.io/projected/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-kube-api-access-kvpmd\") pod \"dnsmasq-dns-55f844cf75-vpcnk\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.387185 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-config\") pod \"dnsmasq-dns-55f844cf75-vpcnk\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.387845 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-dns-svc\") pod \"dnsmasq-dns-55f844cf75-vpcnk\" (UID: 
\"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.388268 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-vpcnk\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.412012 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvpmd\" (UniqueName: \"kubernetes.io/projected/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-kube-api-access-kvpmd\") pod \"dnsmasq-dns-55f844cf75-vpcnk\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: E1204 15:24:33.476389 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-cqwq4" podUID="8fe7f895-e33e-4159-9dcd-689158d16f22" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.490428 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3d07afb-2b13-4962-a233-2fc779f21f68" path="/var/lib/kubelet/pods/b3d07afb-2b13-4962-a233-2fc779f21f68/volumes" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.491799 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-config\") pod \"neutron-6bf8c9bfd6-lcqwj\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.491878 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-httpd-config\") pod \"neutron-6bf8c9bfd6-lcqwj\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.491905 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmkqd\" (UniqueName: \"kubernetes.io/projected/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-kube-api-access-gmkqd\") pod \"neutron-6bf8c9bfd6-lcqwj\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.491954 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-combined-ca-bundle\") pod \"neutron-6bf8c9bfd6-lcqwj\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.492040 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-ovndb-tls-certs\") pod \"neutron-6bf8c9bfd6-lcqwj\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.497210 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"httpd-config\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-httpd-config\") pod \"neutron-6bf8c9bfd6-lcqwj\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.497339 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-ovndb-tls-certs\") pod \"neutron-6bf8c9bfd6-lcqwj\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.500671 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-config\") pod \"neutron-6bf8c9bfd6-lcqwj\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.502919 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-combined-ca-bundle\") pod \"neutron-6bf8c9bfd6-lcqwj\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.518602 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmkqd\" (UniqueName: \"kubernetes.io/projected/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-kube-api-access-gmkqd\") pod \"neutron-6bf8c9bfd6-lcqwj\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.537286 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:33 crc kubenswrapper[4946]: I1204 15:24:33.616240 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.549334 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-9c7bc6557-kqv86"] Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.551667 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.554338 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.554540 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.576183 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-9c7bc6557-kqv86"] Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.644910 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ff5d11b-6f56-4794-97a4-172ef873766c-internal-tls-certs\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.645365 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ff5d11b-6f56-4794-97a4-172ef873766c-combined-ca-bundle\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.645414 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ff5d11b-6f56-4794-97a4-172ef873766c-ovndb-tls-certs\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.645451 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5ff5d11b-6f56-4794-97a4-172ef873766c-config\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.645801 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t688b\" (UniqueName: \"kubernetes.io/projected/5ff5d11b-6f56-4794-97a4-172ef873766c-kube-api-access-t688b\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.645926 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5ff5d11b-6f56-4794-97a4-172ef873766c-httpd-config\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.646029 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ff5d11b-6f56-4794-97a4-172ef873766c-public-tls-certs\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.748272 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/5ff5d11b-6f56-4794-97a4-172ef873766c-config\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.748414 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t688b\" (UniqueName: \"kubernetes.io/projected/5ff5d11b-6f56-4794-97a4-172ef873766c-kube-api-access-t688b\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.748450 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5ff5d11b-6f56-4794-97a4-172ef873766c-httpd-config\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.748491 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ff5d11b-6f56-4794-97a4-172ef873766c-public-tls-certs\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.748545 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ff5d11b-6f56-4794-97a4-172ef873766c-internal-tls-certs\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.748579 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ff5d11b-6f56-4794-97a4-172ef873766c-combined-ca-bundle\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.748627 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ff5d11b-6f56-4794-97a4-172ef873766c-ovndb-tls-certs\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.755824 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ff5d11b-6f56-4794-97a4-172ef873766c-ovndb-tls-certs\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.756168 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ff5d11b-6f56-4794-97a4-172ef873766c-public-tls-certs\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.757212 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/5ff5d11b-6f56-4794-97a4-172ef873766c-config\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " 
pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.759517 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5ff5d11b-6f56-4794-97a4-172ef873766c-httpd-config\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.761770 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ff5d11b-6f56-4794-97a4-172ef873766c-combined-ca-bundle\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.775830 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t688b\" (UniqueName: \"kubernetes.io/projected/5ff5d11b-6f56-4794-97a4-172ef873766c-kube-api-access-t688b\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.778170 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ff5d11b-6f56-4794-97a4-172ef873766c-internal-tls-certs\") pod \"neutron-9c7bc6557-kqv86\" (UID: \"5ff5d11b-6f56-4794-97a4-172ef873766c\") " pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:35 crc kubenswrapper[4946]: I1204 15:24:35.876934 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:36 crc kubenswrapper[4946]: I1204 15:24:36.640978 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-q4qx6"] Dec 04 15:24:38 crc kubenswrapper[4946]: I1204 15:24:38.728155 4946 scope.go:117] "RemoveContainer" containerID="035f3b1e9084cb9762645b18e5340a5aecb57620cb9f5abb332e59e6c93cdd19" Dec 04 15:24:38 crc kubenswrapper[4946]: W1204 15:24:38.764602 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc94e50af_9ae2_4ed6_a351_ccff8209cd55.slice/crio-898bed60fc52bee4d18bdefc21a72390a27899b42c5072dcc2fd89312a28ac74 WatchSource:0}: Error finding container 898bed60fc52bee4d18bdefc21a72390a27899b42c5072dcc2fd89312a28ac74: Status 404 returned error can't find the container with id 898bed60fc52bee4d18bdefc21a72390a27899b42c5072dcc2fd89312a28ac74 Dec 04 15:24:39 crc kubenswrapper[4946]: I1204 15:24:39.192231 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-lvqp5"] Dec 04 15:24:39 crc kubenswrapper[4946]: W1204 15:24:39.348219 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc889ed87_8aa4_4f9a_8191_d93ff43d7bf1.slice/crio-53c7c3d2e435e25b53f54b820085d926224bed3eeeb78bfb8a1429e7cbedde66 WatchSource:0}: Error finding container 53c7c3d2e435e25b53f54b820085d926224bed3eeeb78bfb8a1429e7cbedde66: Status 404 returned error can't find the container with id 53c7c3d2e435e25b53f54b820085d926224bed3eeeb78bfb8a1429e7cbedde66 Dec 04 15:24:39 crc kubenswrapper[4946]: E1204 15:24:39.389347 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current" Dec 04 15:24:39 crc kubenswrapper[4946]: E1204 15:24:39.389428 4946 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current" Dec 04 15:24:39 crc kubenswrapper[4946]: E1204 15:24:39.389616 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cloudkitty-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CloudKittyPassword,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:CloudKittyPassword,Optional:nil,},},},EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:cloudkitty-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:certs,ReadOnly:true,MountPath:/var/lib/openstack/loki-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-td52c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42406,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-db-sync-w5njq_openstack(c6df584d-65d7-4829-8937-3ac0ab49b71b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 15:24:39 crc kubenswrapper[4946]: E1204 15:24:39.391145 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cloudkitty-db-sync-w5njq" podUID="c6df584d-65d7-4829-8937-3ac0ab49b71b" Dec 04 15:24:39 crc kubenswrapper[4946]: I1204 15:24:39.566390 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q4qx6" 
event={"ID":"c94e50af-9ae2-4ed6-a351-ccff8209cd55","Type":"ContainerStarted","Data":"898bed60fc52bee4d18bdefc21a72390a27899b42c5072dcc2fd89312a28ac74"} Dec 04 15:24:39 crc kubenswrapper[4946]: I1204 15:24:39.576795 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" event={"ID":"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1","Type":"ContainerStarted","Data":"53c7c3d2e435e25b53f54b820085d926224bed3eeeb78bfb8a1429e7cbedde66"} Dec 04 15:24:39 crc kubenswrapper[4946]: E1204 15:24:39.584688 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current\\\"\"" pod="openstack/cloudkitty-db-sync-w5njq" podUID="c6df584d-65d7-4829-8937-3ac0ab49b71b" Dec 04 15:24:40 crc kubenswrapper[4946]: I1204 15:24:40.048367 4946 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 15:24:40 crc kubenswrapper[4946]: I1204 15:24:40.198052 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 15:24:40 crc kubenswrapper[4946]: I1204 15:24:40.437555 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-vpcnk"] Dec 04 15:24:40 crc kubenswrapper[4946]: I1204 15:24:40.575865 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-9c7bc6557-kqv86"] Dec 04 15:24:40 crc kubenswrapper[4946]: W1204 15:24:40.589055 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ff5d11b_6f56_4794_97a4_172ef873766c.slice/crio-386249d09162c383482079b96b06cab7a15f2c69f2a6aaca9eeaf1d50d9326a7 WatchSource:0}: Error finding container 386249d09162c383482079b96b06cab7a15f2c69f2a6aaca9eeaf1d50d9326a7: Status 404 returned error can't find the container with id 386249d09162c383482079b96b06cab7a15f2c69f2a6aaca9eeaf1d50d9326a7 Dec 04 15:24:40 crc kubenswrapper[4946]: I1204 15:24:40.596829 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q4qx6" event={"ID":"c94e50af-9ae2-4ed6-a351-ccff8209cd55","Type":"ContainerStarted","Data":"69d68757db23ce226565173643b4ba8f92219d161e8d4837acffdeb9b1a1ed47"} Dec 04 15:24:40 crc kubenswrapper[4946]: I1204 15:24:40.603291 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"81c04277-0e34-46c8-b207-f4988304e238","Type":"ContainerStarted","Data":"b4a595216431109a8c7af1f3b938c7c4b426d10239c5d744761b65118ddfb91c"} Dec 04 15:24:40 crc kubenswrapper[4946]: I1204 15:24:40.607873 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 15:24:40 crc kubenswrapper[4946]: I1204 15:24:40.612023 4946 generic.go:334] "Generic (PLEG): container finished" podID="c889ed87-8aa4-4f9a-8191-d93ff43d7bf1" containerID="61d701c0424ec47c15ac25d16308ca8f84a9b41a2337b53d193a724853a2d256" exitCode=0 Dec 04 15:24:40 crc kubenswrapper[4946]: I1204 15:24:40.612110 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" event={"ID":"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1","Type":"ContainerDied","Data":"61d701c0424ec47c15ac25d16308ca8f84a9b41a2337b53d193a724853a2d256"} Dec 04 15:24:40 crc kubenswrapper[4946]: I1204 15:24:40.619346 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" event={"ID":"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144","Type":"ContainerStarted","Data":"83e337d1cf8e55c96dc52bbea8a2d75527c5e4bc9bd81ea0358ff233f56f4a34"} Dec 04 15:24:40 crc kubenswrapper[4946]: I1204 15:24:40.631152 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef","Type":"ContainerStarted","Data":"96aa62470cc13084bd513c6d04189c795a7f8a0a359dcfc4789f76c936147981"} Dec 04 15:24:40 crc kubenswrapper[4946]: I1204 15:24:40.640586 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-q4qx6" podStartSLOduration=36.640551322 podStartE2EDuration="36.640551322s" podCreationTimestamp="2025-12-04 15:24:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:24:40.630397194 +0000 UTC m=+1331.516440825" watchObservedRunningTime="2025-12-04 15:24:40.640551322 +0000 UTC m=+1331.526594963" Dec 04 15:24:40 crc kubenswrapper[4946]: W1204 15:24:40.648779 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda13d6013_a7ee_49b5_be0d_b9c41b531916.slice/crio-d997c89ed5fac0dee2cfcf251688f4bb82c1380cdfc86058a2879bcc1d5f21d7 WatchSource:0}: Error finding container d997c89ed5fac0dee2cfcf251688f4bb82c1380cdfc86058a2879bcc1d5f21d7: Status 404 returned error can't find the container with id d997c89ed5fac0dee2cfcf251688f4bb82c1380cdfc86058a2879bcc1d5f21d7 Dec 04 15:24:40 crc kubenswrapper[4946]: I1204 15:24:40.660387 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fzhsp" event={"ID":"79b3e881-2a15-43cf-aefa-b0b4dc1f5935","Type":"ContainerStarted","Data":"92d9c0475208571121aca1c206c5f8e190ab1b720f04730a7c8f3d0143b3de7b"} Dec 04 15:24:40 crc kubenswrapper[4946]: I1204 15:24:40.689211 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-fzhsp" podStartSLOduration=11.963006532 podStartE2EDuration="46.689187423s" podCreationTimestamp="2025-12-04 15:23:54 +0000 UTC" firstStartedPulling="2025-12-04 15:23:56.943612148 +0000 UTC m=+1287.829655789" lastFinishedPulling="2025-12-04 15:24:31.669793039 +0000 UTC m=+1322.555836680" observedRunningTime="2025-12-04 15:24:40.680524856 +0000 UTC m=+1331.566568517" watchObservedRunningTime="2025-12-04 15:24:40.689187423 +0000 UTC m=+1331.575231064" Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.130400 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.236071 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-ovsdbserver-nb\") pod \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.236174 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-ovsdbserver-sb\") pod \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.236286 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ksps\" (UniqueName: \"kubernetes.io/projected/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-kube-api-access-2ksps\") pod \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.236330 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-dns-swift-storage-0\") pod \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.236377 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-dns-svc\") pod \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.236487 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-config\") pod \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\" (UID: \"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1\") " Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.249894 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-kube-api-access-2ksps" (OuterVolumeSpecName: "kube-api-access-2ksps") pod "c889ed87-8aa4-4f9a-8191-d93ff43d7bf1" (UID: "c889ed87-8aa4-4f9a-8191-d93ff43d7bf1"). InnerVolumeSpecName "kube-api-access-2ksps". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.256196 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6bf8c9bfd6-lcqwj"] Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.328326 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c889ed87-8aa4-4f9a-8191-d93ff43d7bf1" (UID: "c889ed87-8aa4-4f9a-8191-d93ff43d7bf1"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.328272 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c889ed87-8aa4-4f9a-8191-d93ff43d7bf1" (UID: "c889ed87-8aa4-4f9a-8191-d93ff43d7bf1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.335736 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c889ed87-8aa4-4f9a-8191-d93ff43d7bf1" (UID: "c889ed87-8aa4-4f9a-8191-d93ff43d7bf1"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.339959 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.340012 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ksps\" (UniqueName: \"kubernetes.io/projected/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-kube-api-access-2ksps\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.340029 4946 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.340043 4946 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.365697 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-config" (OuterVolumeSpecName: "config") pod "c889ed87-8aa4-4f9a-8191-d93ff43d7bf1" (UID: "c889ed87-8aa4-4f9a-8191-d93ff43d7bf1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.372847 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c889ed87-8aa4-4f9a-8191-d93ff43d7bf1" (UID: "c889ed87-8aa4-4f9a-8191-d93ff43d7bf1"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.441860 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.442237 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.690991 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" event={"ID":"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144","Type":"ContainerDied","Data":"e18351f352a6d42119d3043d041b04ed4bbbd19de51a58becd7d4dd001a3ba83"} Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.690999 4946 generic.go:334] "Generic (PLEG): container finished" podID="9b3cb211-e6b7-4ab2-9978-bb62cf1ff144" containerID="e18351f352a6d42119d3043d041b04ed4bbbd19de51a58becd7d4dd001a3ba83" exitCode=0 Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.724626 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6bf8c9bfd6-lcqwj" event={"ID":"c5b6c82d-825a-448c-a03c-53dee6f4bfc0","Type":"ContainerStarted","Data":"6db82a66a2b56e240fabd9acf126caf510fd23306360ac6609d8db56861245de"} Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.724740 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6bf8c9bfd6-lcqwj" event={"ID":"c5b6c82d-825a-448c-a03c-53dee6f4bfc0","Type":"ContainerStarted","Data":"30d4b5b81ad6772a3397a56af948411e43e55872a9ce5ef8806c6113440f0186"} Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.737374 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a13d6013-a7ee-49b5-be0d-b9c41b531916","Type":"ContainerStarted","Data":"a081eefc7088f1bf0d59ef9b4781c2edb95563b471d5158073efa73273c843e1"} Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.738444 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a13d6013-a7ee-49b5-be0d-b9c41b531916","Type":"ContainerStarted","Data":"d997c89ed5fac0dee2cfcf251688f4bb82c1380cdfc86058a2879bcc1d5f21d7"} Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.753775 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9c7bc6557-kqv86" event={"ID":"5ff5d11b-6f56-4794-97a4-172ef873766c","Type":"ContainerStarted","Data":"4293cd7ac7f25cb2944931929bd48c04f9a51bf7799ee14d1dd2082f78e95a3a"} Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.753850 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9c7bc6557-kqv86" event={"ID":"5ff5d11b-6f56-4794-97a4-172ef873766c","Type":"ContainerStarted","Data":"650cc70e1d03d9269a1accc64072f458dbe1d2fc824fb23936d37505705bfb8d"} Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.753871 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9c7bc6557-kqv86" event={"ID":"5ff5d11b-6f56-4794-97a4-172ef873766c","Type":"ContainerStarted","Data":"386249d09162c383482079b96b06cab7a15f2c69f2a6aaca9eeaf1d50d9326a7"} Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.755395 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 
15:24:41.785562 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"81c04277-0e34-46c8-b207-f4988304e238","Type":"ContainerStarted","Data":"f776c8ed480eea46d7337c95e2bf9db6dea74b7d7ca40be059faab838f506cb8"} Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.815586 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" event={"ID":"c889ed87-8aa4-4f9a-8191-d93ff43d7bf1","Type":"ContainerDied","Data":"53c7c3d2e435e25b53f54b820085d926224bed3eeeb78bfb8a1429e7cbedde66"} Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.815681 4946 scope.go:117] "RemoveContainer" containerID="61d701c0424ec47c15ac25d16308ca8f84a9b41a2337b53d193a724853a2d256" Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.816151 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-lvqp5" Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.823349 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-9c7bc6557-kqv86" podStartSLOduration=6.823327074 podStartE2EDuration="6.823327074s" podCreationTimestamp="2025-12-04 15:24:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:24:41.799838491 +0000 UTC m=+1332.685882132" watchObservedRunningTime="2025-12-04 15:24:41.823327074 +0000 UTC m=+1332.709370715" Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.895212 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-lvqp5"] Dec 04 15:24:41 crc kubenswrapper[4946]: I1204 15:24:41.907094 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-lvqp5"] Dec 04 15:24:43 crc kubenswrapper[4946]: I1204 15:24:43.471139 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c889ed87-8aa4-4f9a-8191-d93ff43d7bf1" path="/var/lib/kubelet/pods/c889ed87-8aa4-4f9a-8191-d93ff43d7bf1/volumes" Dec 04 15:24:43 crc kubenswrapper[4946]: I1204 15:24:43.886227 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" event={"ID":"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144","Type":"ContainerStarted","Data":"1fc0ce0643aa30266903bd85829348add81dd6adf9ffd70a9a2eafa9d1a4977e"} Dec 04 15:24:43 crc kubenswrapper[4946]: I1204 15:24:43.889592 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6bf8c9bfd6-lcqwj" event={"ID":"c5b6c82d-825a-448c-a03c-53dee6f4bfc0","Type":"ContainerStarted","Data":"50743d59f9fc4173f74e2772198eda1df47511c19225da642f2e66a708a571f1"} Dec 04 15:24:43 crc kubenswrapper[4946]: I1204 15:24:43.892890 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef","Type":"ContainerStarted","Data":"4178b6c53a12c8d92b1608ca5b654aebdc2ff59365faeaea745c2f1cc3e861fb"} Dec 04 15:24:43 crc kubenswrapper[4946]: I1204 15:24:43.895528 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"81c04277-0e34-46c8-b207-f4988304e238","Type":"ContainerStarted","Data":"da6681e76bc0a4bbb934c1470829c49191bf1383edf9002fe356467821e7f89e"} Dec 04 15:24:44 crc kubenswrapper[4946]: I1204 15:24:44.906952 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"a13d6013-a7ee-49b5-be0d-b9c41b531916","Type":"ContainerStarted","Data":"6834cd2e54980ceee681bdaf767d14f3087936e025513156c74cbec39436f806"} Dec 04 15:24:44 crc kubenswrapper[4946]: I1204 15:24:44.908610 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:44 crc kubenswrapper[4946]: I1204 15:24:44.943720 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" podStartSLOduration=11.943695074 podStartE2EDuration="11.943695074s" podCreationTimestamp="2025-12-04 15:24:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:24:44.927214673 +0000 UTC m=+1335.813258354" watchObservedRunningTime="2025-12-04 15:24:44.943695074 +0000 UTC m=+1335.829738715" Dec 04 15:24:44 crc kubenswrapper[4946]: I1204 15:24:44.964353 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6bf8c9bfd6-lcqwj" podStartSLOduration=11.964333819 podStartE2EDuration="11.964333819s" podCreationTimestamp="2025-12-04 15:24:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:24:44.945669588 +0000 UTC m=+1335.831713269" watchObservedRunningTime="2025-12-04 15:24:44.964333819 +0000 UTC m=+1335.850377460" Dec 04 15:24:45 crc kubenswrapper[4946]: I1204 15:24:45.919694 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a13d6013-a7ee-49b5-be0d-b9c41b531916" containerName="glance-log" containerID="cri-o://a081eefc7088f1bf0d59ef9b4781c2edb95563b471d5158073efa73273c843e1" gracePeriod=30 Dec 04 15:24:45 crc kubenswrapper[4946]: I1204 15:24:45.919767 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a13d6013-a7ee-49b5-be0d-b9c41b531916" containerName="glance-httpd" containerID="cri-o://6834cd2e54980ceee681bdaf767d14f3087936e025513156c74cbec39436f806" gracePeriod=30 Dec 04 15:24:45 crc kubenswrapper[4946]: I1204 15:24:45.920365 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="81c04277-0e34-46c8-b207-f4988304e238" containerName="glance-log" containerID="cri-o://f776c8ed480eea46d7337c95e2bf9db6dea74b7d7ca40be059faab838f506cb8" gracePeriod=30 Dec 04 15:24:45 crc kubenswrapper[4946]: I1204 15:24:45.920618 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="81c04277-0e34-46c8-b207-f4988304e238" containerName="glance-httpd" containerID="cri-o://da6681e76bc0a4bbb934c1470829c49191bf1383edf9002fe356467821e7f89e" gracePeriod=30 Dec 04 15:24:45 crc kubenswrapper[4946]: I1204 15:24:45.980089 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=26.980068859 podStartE2EDuration="26.980068859s" podCreationTimestamp="2025-12-04 15:24:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:24:45.95562385 +0000 UTC m=+1336.841667511" watchObservedRunningTime="2025-12-04 15:24:45.980068859 +0000 UTC m=+1336.866112500" Dec 04 15:24:45 crc kubenswrapper[4946]: I1204 15:24:45.984037 4946 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=25.984022017 podStartE2EDuration="25.984022017s" podCreationTimestamp="2025-12-04 15:24:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:24:45.97864823 +0000 UTC m=+1336.864691891" watchObservedRunningTime="2025-12-04 15:24:45.984022017 +0000 UTC m=+1336.870065658" Dec 04 15:24:46 crc kubenswrapper[4946]: I1204 15:24:46.934650 4946 generic.go:334] "Generic (PLEG): container finished" podID="81c04277-0e34-46c8-b207-f4988304e238" containerID="da6681e76bc0a4bbb934c1470829c49191bf1383edf9002fe356467821e7f89e" exitCode=0 Dec 04 15:24:46 crc kubenswrapper[4946]: I1204 15:24:46.935099 4946 generic.go:334] "Generic (PLEG): container finished" podID="81c04277-0e34-46c8-b207-f4988304e238" containerID="f776c8ed480eea46d7337c95e2bf9db6dea74b7d7ca40be059faab838f506cb8" exitCode=143 Dec 04 15:24:46 crc kubenswrapper[4946]: I1204 15:24:46.934743 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"81c04277-0e34-46c8-b207-f4988304e238","Type":"ContainerDied","Data":"da6681e76bc0a4bbb934c1470829c49191bf1383edf9002fe356467821e7f89e"} Dec 04 15:24:46 crc kubenswrapper[4946]: I1204 15:24:46.935169 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"81c04277-0e34-46c8-b207-f4988304e238","Type":"ContainerDied","Data":"f776c8ed480eea46d7337c95e2bf9db6dea74b7d7ca40be059faab838f506cb8"} Dec 04 15:24:46 crc kubenswrapper[4946]: I1204 15:24:46.938280 4946 generic.go:334] "Generic (PLEG): container finished" podID="a13d6013-a7ee-49b5-be0d-b9c41b531916" containerID="6834cd2e54980ceee681bdaf767d14f3087936e025513156c74cbec39436f806" exitCode=0 Dec 04 15:24:46 crc kubenswrapper[4946]: I1204 15:24:46.938315 4946 generic.go:334] "Generic (PLEG): container finished" podID="a13d6013-a7ee-49b5-be0d-b9c41b531916" containerID="a081eefc7088f1bf0d59ef9b4781c2edb95563b471d5158073efa73273c843e1" exitCode=143 Dec 04 15:24:46 crc kubenswrapper[4946]: I1204 15:24:46.938343 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a13d6013-a7ee-49b5-be0d-b9c41b531916","Type":"ContainerDied","Data":"6834cd2e54980ceee681bdaf767d14f3087936e025513156c74cbec39436f806"} Dec 04 15:24:46 crc kubenswrapper[4946]: I1204 15:24:46.938580 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a13d6013-a7ee-49b5-be0d-b9c41b531916","Type":"ContainerDied","Data":"a081eefc7088f1bf0d59ef9b4781c2edb95563b471d5158073efa73273c843e1"} Dec 04 15:24:48 crc kubenswrapper[4946]: I1204 15:24:48.547621 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:24:48 crc kubenswrapper[4946]: I1204 15:24:48.639532 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-gvts4"] Dec 04 15:24:48 crc kubenswrapper[4946]: I1204 15:24:48.647343 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" podUID="539ee175-1c38-4ed1-8c41-52a5af211b83" containerName="dnsmasq-dns" containerID="cri-o://2af8a4bb0fe3d5278ad3453c532e22348c421c345a099eea9a176bef5f133930" gracePeriod=10 Dec 04 15:24:49 crc kubenswrapper[4946]: I1204 15:24:49.974778 
4946 generic.go:334] "Generic (PLEG): container finished" podID="539ee175-1c38-4ed1-8c41-52a5af211b83" containerID="2af8a4bb0fe3d5278ad3453c532e22348c421c345a099eea9a176bef5f133930" exitCode=0 Dec 04 15:24:49 crc kubenswrapper[4946]: I1204 15:24:49.974934 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" event={"ID":"539ee175-1c38-4ed1-8c41-52a5af211b83","Type":"ContainerDied","Data":"2af8a4bb0fe3d5278ad3453c532e22348c421c345a099eea9a176bef5f133930"} Dec 04 15:24:50 crc kubenswrapper[4946]: I1204 15:24:50.406590 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" podUID="539ee175-1c38-4ed1-8c41-52a5af211b83" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.161:5353: connect: connection refused" Dec 04 15:24:51 crc kubenswrapper[4946]: I1204 15:24:51.329173 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 04 15:24:51 crc kubenswrapper[4946]: I1204 15:24:51.329731 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 04 15:24:51 crc kubenswrapper[4946]: I1204 15:24:51.705756 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 04 15:24:51 crc kubenswrapper[4946]: I1204 15:24:51.705845 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.666622 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.768410 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-dns-svc\") pod \"539ee175-1c38-4ed1-8c41-52a5af211b83\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.768527 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-ovsdbserver-sb\") pod \"539ee175-1c38-4ed1-8c41-52a5af211b83\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.768549 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-dns-swift-storage-0\") pod \"539ee175-1c38-4ed1-8c41-52a5af211b83\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.768586 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsww2\" (UniqueName: \"kubernetes.io/projected/539ee175-1c38-4ed1-8c41-52a5af211b83-kube-api-access-tsww2\") pod \"539ee175-1c38-4ed1-8c41-52a5af211b83\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.769187 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-ovsdbserver-nb\") pod \"539ee175-1c38-4ed1-8c41-52a5af211b83\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " Dec 04 15:24:52 crc kubenswrapper[4946]: 
I1204 15:24:52.769266 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-config\") pod \"539ee175-1c38-4ed1-8c41-52a5af211b83\" (UID: \"539ee175-1c38-4ed1-8c41-52a5af211b83\") " Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.776959 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/539ee175-1c38-4ed1-8c41-52a5af211b83-kube-api-access-tsww2" (OuterVolumeSpecName: "kube-api-access-tsww2") pod "539ee175-1c38-4ed1-8c41-52a5af211b83" (UID: "539ee175-1c38-4ed1-8c41-52a5af211b83"). InnerVolumeSpecName "kube-api-access-tsww2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.786750 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.842436 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "539ee175-1c38-4ed1-8c41-52a5af211b83" (UID: "539ee175-1c38-4ed1-8c41-52a5af211b83"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.854857 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "539ee175-1c38-4ed1-8c41-52a5af211b83" (UID: "539ee175-1c38-4ed1-8c41-52a5af211b83"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.872359 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a13d6013-a7ee-49b5-be0d-b9c41b531916-config-data\") pod \"a13d6013-a7ee-49b5-be0d-b9c41b531916\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.872446 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pf9g\" (UniqueName: \"kubernetes.io/projected/a13d6013-a7ee-49b5-be0d-b9c41b531916-kube-api-access-6pf9g\") pod \"a13d6013-a7ee-49b5-be0d-b9c41b531916\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.872522 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a13d6013-a7ee-49b5-be0d-b9c41b531916-httpd-run\") pod \"a13d6013-a7ee-49b5-be0d-b9c41b531916\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.872543 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a13d6013-a7ee-49b5-be0d-b9c41b531916-logs\") pod \"a13d6013-a7ee-49b5-be0d-b9c41b531916\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.872905 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a\") pod \"a13d6013-a7ee-49b5-be0d-b9c41b531916\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.873008 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a13d6013-a7ee-49b5-be0d-b9c41b531916-scripts\") pod \"a13d6013-a7ee-49b5-be0d-b9c41b531916\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.873154 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a13d6013-a7ee-49b5-be0d-b9c41b531916-combined-ca-bundle\") pod \"a13d6013-a7ee-49b5-be0d-b9c41b531916\" (UID: \"a13d6013-a7ee-49b5-be0d-b9c41b531916\") " Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.873850 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a13d6013-a7ee-49b5-be0d-b9c41b531916-logs" (OuterVolumeSpecName: "logs") pod "a13d6013-a7ee-49b5-be0d-b9c41b531916" (UID: "a13d6013-a7ee-49b5-be0d-b9c41b531916"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.874133 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "539ee175-1c38-4ed1-8c41-52a5af211b83" (UID: "539ee175-1c38-4ed1-8c41-52a5af211b83"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.876904 4946 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.876975 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.876997 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsww2\" (UniqueName: \"kubernetes.io/projected/539ee175-1c38-4ed1-8c41-52a5af211b83-kube-api-access-tsww2\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.877014 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.877147 4946 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a13d6013-a7ee-49b5-be0d-b9c41b531916-logs\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.877489 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a13d6013-a7ee-49b5-be0d-b9c41b531916-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a13d6013-a7ee-49b5-be0d-b9c41b531916" (UID: "a13d6013-a7ee-49b5-be0d-b9c41b531916"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.878237 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a13d6013-a7ee-49b5-be0d-b9c41b531916-kube-api-access-6pf9g" (OuterVolumeSpecName: "kube-api-access-6pf9g") pod "a13d6013-a7ee-49b5-be0d-b9c41b531916" (UID: "a13d6013-a7ee-49b5-be0d-b9c41b531916"). InnerVolumeSpecName "kube-api-access-6pf9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.882677 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a13d6013-a7ee-49b5-be0d-b9c41b531916-scripts" (OuterVolumeSpecName: "scripts") pod "a13d6013-a7ee-49b5-be0d-b9c41b531916" (UID: "a13d6013-a7ee-49b5-be0d-b9c41b531916"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.891745 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "539ee175-1c38-4ed1-8c41-52a5af211b83" (UID: "539ee175-1c38-4ed1-8c41-52a5af211b83"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.904677 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a" (OuterVolumeSpecName: "glance") pod "a13d6013-a7ee-49b5-be0d-b9c41b531916" (UID: "a13d6013-a7ee-49b5-be0d-b9c41b531916"). 
InnerVolumeSpecName "pvc-c2443261-ac2f-492a-9ba5-293baae6928a". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.905060 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-config" (OuterVolumeSpecName: "config") pod "539ee175-1c38-4ed1-8c41-52a5af211b83" (UID: "539ee175-1c38-4ed1-8c41-52a5af211b83"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.907680 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a13d6013-a7ee-49b5-be0d-b9c41b531916-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a13d6013-a7ee-49b5-be0d-b9c41b531916" (UID: "a13d6013-a7ee-49b5-be0d-b9c41b531916"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.947391 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a13d6013-a7ee-49b5-be0d-b9c41b531916-config-data" (OuterVolumeSpecName: "config-data") pod "a13d6013-a7ee-49b5-be0d-b9c41b531916" (UID: "a13d6013-a7ee-49b5-be0d-b9c41b531916"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.978955 4946 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-c2443261-ac2f-492a-9ba5-293baae6928a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a\") on node \"crc\" " Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.979009 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a13d6013-a7ee-49b5-be0d-b9c41b531916-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.979028 4946 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.979044 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a13d6013-a7ee-49b5-be0d-b9c41b531916-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.979056 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/539ee175-1c38-4ed1-8c41-52a5af211b83-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.979065 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a13d6013-a7ee-49b5-be0d-b9c41b531916-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.979077 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pf9g\" (UniqueName: \"kubernetes.io/projected/a13d6013-a7ee-49b5-be0d-b9c41b531916-kube-api-access-6pf9g\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:52 crc kubenswrapper[4946]: I1204 15:24:52.979085 4946 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a13d6013-a7ee-49b5-be0d-b9c41b531916-httpd-run\") on node 
\"crc\" DevicePath \"\"" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.004865 4946 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.005101 4946 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-c2443261-ac2f-492a-9ba5-293baae6928a" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a") on node "crc" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.029626 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a13d6013-a7ee-49b5-be0d-b9c41b531916","Type":"ContainerDied","Data":"d997c89ed5fac0dee2cfcf251688f4bb82c1380cdfc86058a2879bcc1d5f21d7"} Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.029710 4946 scope.go:117] "RemoveContainer" containerID="6834cd2e54980ceee681bdaf767d14f3087936e025513156c74cbec39436f806" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.029640 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.040153 4946 generic.go:334] "Generic (PLEG): container finished" podID="79b3e881-2a15-43cf-aefa-b0b4dc1f5935" containerID="92d9c0475208571121aca1c206c5f8e190ab1b720f04730a7c8f3d0143b3de7b" exitCode=0 Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.040297 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fzhsp" event={"ID":"79b3e881-2a15-43cf-aefa-b0b4dc1f5935","Type":"ContainerDied","Data":"92d9c0475208571121aca1c206c5f8e190ab1b720f04730a7c8f3d0143b3de7b"} Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.049842 4946 generic.go:334] "Generic (PLEG): container finished" podID="c94e50af-9ae2-4ed6-a351-ccff8209cd55" containerID="69d68757db23ce226565173643b4ba8f92219d161e8d4837acffdeb9b1a1ed47" exitCode=0 Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.049908 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q4qx6" event={"ID":"c94e50af-9ae2-4ed6-a351-ccff8209cd55","Type":"ContainerDied","Data":"69d68757db23ce226565173643b4ba8f92219d161e8d4837acffdeb9b1a1ed47"} Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.065561 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-tvf2t" event={"ID":"7a2d2577-fc5e-4375-8c8f-154aa218707f","Type":"ContainerStarted","Data":"03bc0e32c860c9d532acd53ed6cb6845f89746360357f564ecf12f8a29009193"} Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.083339 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" event={"ID":"539ee175-1c38-4ed1-8c41-52a5af211b83","Type":"ContainerDied","Data":"1a79a61c7dfb2cd02fa916a2d1fb4bfc420f821293d82cb564a454eee43f66d6"} Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.083497 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-gvts4" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.086708 4946 reconciler_common.go:293] "Volume detached for volume \"pvc-c2443261-ac2f-492a-9ba5-293baae6928a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.138026 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.176982 4946 scope.go:117] "RemoveContainer" containerID="a081eefc7088f1bf0d59ef9b4781c2edb95563b471d5158073efa73273c843e1" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.177218 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.205818 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-tvf2t" podStartSLOduration=4.104813299 podStartE2EDuration="59.205791162s" podCreationTimestamp="2025-12-04 15:23:54 +0000 UTC" firstStartedPulling="2025-12-04 15:23:57.080232189 +0000 UTC m=+1287.966275830" lastFinishedPulling="2025-12-04 15:24:52.181210052 +0000 UTC m=+1343.067253693" observedRunningTime="2025-12-04 15:24:53.12554521 +0000 UTC m=+1344.011588851" watchObservedRunningTime="2025-12-04 15:24:53.205791162 +0000 UTC m=+1344.091834803" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.220341 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-gvts4"] Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.227183 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-gvts4"] Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.243772 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 15:24:53 crc kubenswrapper[4946]: E1204 15:24:53.244393 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="539ee175-1c38-4ed1-8c41-52a5af211b83" containerName="dnsmasq-dns" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.244409 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="539ee175-1c38-4ed1-8c41-52a5af211b83" containerName="dnsmasq-dns" Dec 04 15:24:53 crc kubenswrapper[4946]: E1204 15:24:53.244424 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a13d6013-a7ee-49b5-be0d-b9c41b531916" containerName="glance-log" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.244431 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="a13d6013-a7ee-49b5-be0d-b9c41b531916" containerName="glance-log" Dec 04 15:24:53 crc kubenswrapper[4946]: E1204 15:24:53.244445 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a13d6013-a7ee-49b5-be0d-b9c41b531916" containerName="glance-httpd" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.244451 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="a13d6013-a7ee-49b5-be0d-b9c41b531916" containerName="glance-httpd" Dec 04 15:24:53 crc kubenswrapper[4946]: E1204 15:24:53.244478 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="539ee175-1c38-4ed1-8c41-52a5af211b83" containerName="init" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.244483 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="539ee175-1c38-4ed1-8c41-52a5af211b83" containerName="init" Dec 04 
15:24:53 crc kubenswrapper[4946]: E1204 15:24:53.244503 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c889ed87-8aa4-4f9a-8191-d93ff43d7bf1" containerName="init" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.244509 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c889ed87-8aa4-4f9a-8191-d93ff43d7bf1" containerName="init" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.244727 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="a13d6013-a7ee-49b5-be0d-b9c41b531916" containerName="glance-httpd" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.244744 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="c889ed87-8aa4-4f9a-8191-d93ff43d7bf1" containerName="init" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.244755 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="a13d6013-a7ee-49b5-be0d-b9c41b531916" containerName="glance-log" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.244765 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="539ee175-1c38-4ed1-8c41-52a5af211b83" containerName="dnsmasq-dns" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.246286 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.250557 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.253186 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.253512 4946 scope.go:117] "RemoveContainer" containerID="2af8a4bb0fe3d5278ad3453c532e22348c421c345a099eea9a176bef5f133930" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.256930 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.294930 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-scripts\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.294981 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20d2aa22-a98c-483e-b74a-dd549ec45640-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.295024 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c2443261-ac2f-492a-9ba5-293baae6928a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.295080 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.295139 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20d2aa22-a98c-483e-b74a-dd549ec45640-logs\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.295170 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4lb9\" (UniqueName: \"kubernetes.io/projected/20d2aa22-a98c-483e-b74a-dd549ec45640-kube-api-access-h4lb9\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.295293 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-config-data\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.295330 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.302838 4946 scope.go:117] "RemoveContainer" containerID="62de0f529dcffe666f65efd385cf1cf20d0482562d18c84bbb69e0e7ca4b8431" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.401123 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-config-data\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.401192 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.401226 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-scripts\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.401243 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20d2aa22-a98c-483e-b74a-dd549ec45640-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " 
pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.401270 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c2443261-ac2f-492a-9ba5-293baae6928a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.401304 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.401334 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20d2aa22-a98c-483e-b74a-dd549ec45640-logs\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.401361 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4lb9\" (UniqueName: \"kubernetes.io/projected/20d2aa22-a98c-483e-b74a-dd549ec45640-kube-api-access-h4lb9\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.403362 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20d2aa22-a98c-483e-b74a-dd549ec45640-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.403776 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20d2aa22-a98c-483e-b74a-dd549ec45640-logs\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.407328 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.407364 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c2443261-ac2f-492a-9ba5-293baae6928a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f3260bbd2cdb28f5793a9d1edc63c254c747b9e66cce69dc3f280fc78b1b134b/globalmount\"" pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.407972 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.408357 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-config-data\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.408861 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-scripts\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.412196 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.425847 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4lb9\" (UniqueName: \"kubernetes.io/projected/20d2aa22-a98c-483e-b74a-dd549ec45640-kube-api-access-h4lb9\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.473584 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="539ee175-1c38-4ed1-8c41-52a5af211b83" path="/var/lib/kubelet/pods/539ee175-1c38-4ed1-8c41-52a5af211b83/volumes" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.474329 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a13d6013-a7ee-49b5-be0d-b9c41b531916" path="/var/lib/kubelet/pods/a13d6013-a7ee-49b5-be0d-b9c41b531916/volumes" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.498533 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c2443261-ac2f-492a-9ba5-293baae6928a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a\") pod \"glance-default-external-api-0\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.531918 4946 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.585775 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.604471 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4gd8j\" (UniqueName: \"kubernetes.io/projected/81c04277-0e34-46c8-b207-f4988304e238-kube-api-access-4gd8j\") pod \"81c04277-0e34-46c8-b207-f4988304e238\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.604551 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/81c04277-0e34-46c8-b207-f4988304e238-httpd-run\") pod \"81c04277-0e34-46c8-b207-f4988304e238\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.604645 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81c04277-0e34-46c8-b207-f4988304e238-scripts\") pod \"81c04277-0e34-46c8-b207-f4988304e238\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.604789 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\") pod \"81c04277-0e34-46c8-b207-f4988304e238\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.604858 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81c04277-0e34-46c8-b207-f4988304e238-config-data\") pod \"81c04277-0e34-46c8-b207-f4988304e238\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.604926 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81c04277-0e34-46c8-b207-f4988304e238-logs\") pod \"81c04277-0e34-46c8-b207-f4988304e238\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.605325 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81c04277-0e34-46c8-b207-f4988304e238-combined-ca-bundle\") pod \"81c04277-0e34-46c8-b207-f4988304e238\" (UID: \"81c04277-0e34-46c8-b207-f4988304e238\") " Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.608341 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81c04277-0e34-46c8-b207-f4988304e238-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "81c04277-0e34-46c8-b207-f4988304e238" (UID: "81c04277-0e34-46c8-b207-f4988304e238"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.609774 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81c04277-0e34-46c8-b207-f4988304e238-logs" (OuterVolumeSpecName: "logs") pod "81c04277-0e34-46c8-b207-f4988304e238" (UID: "81c04277-0e34-46c8-b207-f4988304e238"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.616264 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81c04277-0e34-46c8-b207-f4988304e238-scripts" (OuterVolumeSpecName: "scripts") pod "81c04277-0e34-46c8-b207-f4988304e238" (UID: "81c04277-0e34-46c8-b207-f4988304e238"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.617891 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81c04277-0e34-46c8-b207-f4988304e238-kube-api-access-4gd8j" (OuterVolumeSpecName: "kube-api-access-4gd8j") pod "81c04277-0e34-46c8-b207-f4988304e238" (UID: "81c04277-0e34-46c8-b207-f4988304e238"). InnerVolumeSpecName "kube-api-access-4gd8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.634270 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe" (OuterVolumeSpecName: "glance") pod "81c04277-0e34-46c8-b207-f4988304e238" (UID: "81c04277-0e34-46c8-b207-f4988304e238"). InnerVolumeSpecName "pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.649212 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81c04277-0e34-46c8-b207-f4988304e238-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "81c04277-0e34-46c8-b207-f4988304e238" (UID: "81c04277-0e34-46c8-b207-f4988304e238"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.687560 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81c04277-0e34-46c8-b207-f4988304e238-config-data" (OuterVolumeSpecName: "config-data") pod "81c04277-0e34-46c8-b207-f4988304e238" (UID: "81c04277-0e34-46c8-b207-f4988304e238"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.710746 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81c04277-0e34-46c8-b207-f4988304e238-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.710979 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4gd8j\" (UniqueName: \"kubernetes.io/projected/81c04277-0e34-46c8-b207-f4988304e238-kube-api-access-4gd8j\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.711071 4946 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/81c04277-0e34-46c8-b207-f4988304e238-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.711150 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81c04277-0e34-46c8-b207-f4988304e238-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.711250 4946 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\") on node \"crc\" " Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.711308 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81c04277-0e34-46c8-b207-f4988304e238-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.711363 4946 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81c04277-0e34-46c8-b207-f4988304e238-logs\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.774753 4946 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.775054 4946 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe") on node "crc" Dec 04 15:24:53 crc kubenswrapper[4946]: I1204 15:24:53.813622 4946 reconciler_common.go:293] "Volume detached for volume \"pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.116993 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"81c04277-0e34-46c8-b207-f4988304e238","Type":"ContainerDied","Data":"b4a595216431109a8c7af1f3b938c7c4b426d10239c5d744761b65118ddfb91c"} Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.117392 4946 scope.go:117] "RemoveContainer" containerID="da6681e76bc0a4bbb934c1470829c49191bf1383edf9002fe356467821e7f89e" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.117531 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.131073 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-cqwq4" event={"ID":"8fe7f895-e33e-4159-9dcd-689158d16f22","Type":"ContainerStarted","Data":"4a584f8c85227b7e4909b4169e6dd4224d00797c8adccb9ed321bf793d24a3f8"} Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.168246 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-cqwq4" podStartSLOduration=4.535959687 podStartE2EDuration="1m0.168210106s" podCreationTimestamp="2025-12-04 15:23:54 +0000 UTC" firstStartedPulling="2025-12-04 15:23:56.955017051 +0000 UTC m=+1287.841060692" lastFinishedPulling="2025-12-04 15:24:52.58726748 +0000 UTC m=+1343.473311111" observedRunningTime="2025-12-04 15:24:54.167488057 +0000 UTC m=+1345.053531698" watchObservedRunningTime="2025-12-04 15:24:54.168210106 +0000 UTC m=+1345.054253747" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.226672 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.241815 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.262223 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 15:24:54 crc kubenswrapper[4946]: E1204 15:24:54.263090 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81c04277-0e34-46c8-b207-f4988304e238" containerName="glance-httpd" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.263208 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="81c04277-0e34-46c8-b207-f4988304e238" containerName="glance-httpd" Dec 04 15:24:54 crc kubenswrapper[4946]: E1204 15:24:54.263333 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81c04277-0e34-46c8-b207-f4988304e238" containerName="glance-log" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.263386 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="81c04277-0e34-46c8-b207-f4988304e238" containerName="glance-log" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.263682 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="81c04277-0e34-46c8-b207-f4988304e238" containerName="glance-log" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.263783 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="81c04277-0e34-46c8-b207-f4988304e238" containerName="glance-httpd" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.280814 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.302248 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.306940 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.308054 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.365489 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.433782 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.433873 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.433910 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.433969 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/978d45f1-6556-4486-8175-29a7f68b263a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.433999 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.434017 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.434034 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6255p\" (UniqueName: \"kubernetes.io/projected/978d45f1-6556-4486-8175-29a7f68b263a-kube-api-access-6255p\") pod 
\"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.434097 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/978d45f1-6556-4486-8175-29a7f68b263a-logs\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.536196 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.536772 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.536829 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.536880 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/978d45f1-6556-4486-8175-29a7f68b263a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.536922 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.536952 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.536985 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6255p\" (UniqueName: \"kubernetes.io/projected/978d45f1-6556-4486-8175-29a7f68b263a-kube-api-access-6255p\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.537041 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/978d45f1-6556-4486-8175-29a7f68b263a-logs\") pod 
\"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.537614 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/978d45f1-6556-4486-8175-29a7f68b263a-logs\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.538794 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/978d45f1-6556-4486-8175-29a7f68b263a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.554869 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.554915 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d80d3a2b37ec7093e1c47ec6e0b9eb3b02741300f62422d1aa8919218995c41a/globalmount\"" pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.560461 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.564479 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6255p\" (UniqueName: \"kubernetes.io/projected/978d45f1-6556-4486-8175-29a7f68b263a-kube-api-access-6255p\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.570821 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.573759 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.584316 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " 
pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.610298 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\") pod \"glance-default-internal-api-0\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") " pod="openstack/glance-default-internal-api-0" Dec 04 15:24:54 crc kubenswrapper[4946]: I1204 15:24:54.640072 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 04 15:24:55 crc kubenswrapper[4946]: I1204 15:24:55.473886 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81c04277-0e34-46c8-b207-f4988304e238" path="/var/lib/kubelet/pods/81c04277-0e34-46c8-b207-f4988304e238/volumes" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.012371 4946 scope.go:117] "RemoveContainer" containerID="f776c8ed480eea46d7337c95e2bf9db6dea74b7d7ca40be059faab838f506cb8" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.220088 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fzhsp" event={"ID":"79b3e881-2a15-43cf-aefa-b0b4dc1f5935","Type":"ContainerDied","Data":"d0a15eb3c09e6adef35122a665a8e82f62bb0d7ed4986a4463f3463fbe292c58"} Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.221531 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d0a15eb3c09e6adef35122a665a8e82f62bb0d7ed4986a4463f3463fbe292c58" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.222090 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"20d2aa22-a98c-483e-b74a-dd549ec45640","Type":"ContainerStarted","Data":"903b1db54866acba13c6cc308b151db03c43fb9a1094240ea2ade31543761de8"} Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.222374 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-fzhsp" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.225612 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q4qx6" event={"ID":"c94e50af-9ae2-4ed6-a351-ccff8209cd55","Type":"ContainerDied","Data":"898bed60fc52bee4d18bdefc21a72390a27899b42c5072dcc2fd89312a28ac74"} Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.225667 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="898bed60fc52bee4d18bdefc21a72390a27899b42c5072dcc2fd89312a28ac74" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.233420 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.347815 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2x6t4\" (UniqueName: \"kubernetes.io/projected/c94e50af-9ae2-4ed6-a351-ccff8209cd55-kube-api-access-2x6t4\") pod \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.348127 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-scripts\") pod \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.348168 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-logs\") pod \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.348221 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-scripts\") pod \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.348245 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-combined-ca-bundle\") pod \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.348307 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-config-data\") pod \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.348388 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-config-data\") pod \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.348439 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-combined-ca-bundle\") pod \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.348468 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dl8qx\" (UniqueName: \"kubernetes.io/projected/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-kube-api-access-dl8qx\") pod \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\" (UID: \"79b3e881-2a15-43cf-aefa-b0b4dc1f5935\") " Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.348509 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-fernet-keys\") pod \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\" (UID: 
\"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.348547 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-credential-keys\") pod \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\" (UID: \"c94e50af-9ae2-4ed6-a351-ccff8209cd55\") " Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.349571 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-logs" (OuterVolumeSpecName: "logs") pod "79b3e881-2a15-43cf-aefa-b0b4dc1f5935" (UID: "79b3e881-2a15-43cf-aefa-b0b4dc1f5935"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.362650 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-kube-api-access-dl8qx" (OuterVolumeSpecName: "kube-api-access-dl8qx") pod "79b3e881-2a15-43cf-aefa-b0b4dc1f5935" (UID: "79b3e881-2a15-43cf-aefa-b0b4dc1f5935"). InnerVolumeSpecName "kube-api-access-dl8qx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.374769 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "c94e50af-9ae2-4ed6-a351-ccff8209cd55" (UID: "c94e50af-9ae2-4ed6-a351-ccff8209cd55"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.377493 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "c94e50af-9ae2-4ed6-a351-ccff8209cd55" (UID: "c94e50af-9ae2-4ed6-a351-ccff8209cd55"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.377526 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-scripts" (OuterVolumeSpecName: "scripts") pod "79b3e881-2a15-43cf-aefa-b0b4dc1f5935" (UID: "79b3e881-2a15-43cf-aefa-b0b4dc1f5935"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.377903 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-scripts" (OuterVolumeSpecName: "scripts") pod "c94e50af-9ae2-4ed6-a351-ccff8209cd55" (UID: "c94e50af-9ae2-4ed6-a351-ccff8209cd55"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.383627 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c94e50af-9ae2-4ed6-a351-ccff8209cd55-kube-api-access-2x6t4" (OuterVolumeSpecName: "kube-api-access-2x6t4") pod "c94e50af-9ae2-4ed6-a351-ccff8209cd55" (UID: "c94e50af-9ae2-4ed6-a351-ccff8209cd55"). InnerVolumeSpecName "kube-api-access-2x6t4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.417091 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-config-data" (OuterVolumeSpecName: "config-data") pod "79b3e881-2a15-43cf-aefa-b0b4dc1f5935" (UID: "79b3e881-2a15-43cf-aefa-b0b4dc1f5935"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.431567 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-config-data" (OuterVolumeSpecName: "config-data") pod "c94e50af-9ae2-4ed6-a351-ccff8209cd55" (UID: "c94e50af-9ae2-4ed6-a351-ccff8209cd55"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.438018 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c94e50af-9ae2-4ed6-a351-ccff8209cd55" (UID: "c94e50af-9ae2-4ed6-a351-ccff8209cd55"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.451562 4946 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.451606 4946 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.451621 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2x6t4\" (UniqueName: \"kubernetes.io/projected/c94e50af-9ae2-4ed6-a351-ccff8209cd55-kube-api-access-2x6t4\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.451635 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.451647 4946 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-logs\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.451659 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.451668 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.451678 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.451687 4946 reconciler_common.go:293] "Volume 
detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c94e50af-9ae2-4ed6-a351-ccff8209cd55-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.451697 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dl8qx\" (UniqueName: \"kubernetes.io/projected/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-kube-api-access-dl8qx\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.459019 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "79b3e881-2a15-43cf-aefa-b0b4dc1f5935" (UID: "79b3e881-2a15-43cf-aefa-b0b4dc1f5935"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.554967 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79b3e881-2a15-43cf-aefa-b0b4dc1f5935-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:24:58 crc kubenswrapper[4946]: I1204 15:24:58.691534 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 15:24:58 crc kubenswrapper[4946]: W1204 15:24:58.704003 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod978d45f1_6556_4486_8175_29a7f68b263a.slice/crio-53f99e0589e14fadfeec81bad52c91ad056af2f06d8cd4649d377be02c6dda10 WatchSource:0}: Error finding container 53f99e0589e14fadfeec81bad52c91ad056af2f06d8cd4649d377be02c6dda10: Status 404 returned error can't find the container with id 53f99e0589e14fadfeec81bad52c91ad056af2f06d8cd4649d377be02c6dda10 Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.249983 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-w5njq" event={"ID":"c6df584d-65d7-4829-8937-3ac0ab49b71b","Type":"ContainerStarted","Data":"fa591cb12a326a0be71c8f464fcc92ed7afe15e10f7e6befabefbc25961efe9b"} Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.258349 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"978d45f1-6556-4486-8175-29a7f68b263a","Type":"ContainerStarted","Data":"53f99e0589e14fadfeec81bad52c91ad056af2f06d8cd4649d377be02c6dda10"} Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.266813 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef","Type":"ContainerStarted","Data":"e1861f0fbc05f59f814d8127d7687682c33d543b0097b40e1ff4b016a9589ebf"} Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.279275 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"20d2aa22-a98c-483e-b74a-dd549ec45640","Type":"ContainerStarted","Data":"2012190c874c38dc1fcf608c073022d9d04b51c5f1049415ab6b9593e4e9b297"} Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.287646 4946 generic.go:334] "Generic (PLEG): container finished" podID="7a2d2577-fc5e-4375-8c8f-154aa218707f" containerID="03bc0e32c860c9d532acd53ed6cb6845f89746360357f564ecf12f8a29009193" exitCode=0 Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.287757 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-tvf2t" 
event={"ID":"7a2d2577-fc5e-4375-8c8f-154aa218707f","Type":"ContainerDied","Data":"03bc0e32c860c9d532acd53ed6cb6845f89746360357f564ecf12f8a29009193"} Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.287830 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-q4qx6" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.287868 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-fzhsp" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.290145 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-db-sync-w5njq" podStartSLOduration=4.150361745 podStartE2EDuration="1m5.290095226s" podCreationTimestamp="2025-12-04 15:23:54 +0000 UTC" firstStartedPulling="2025-12-04 15:23:57.010090849 +0000 UTC m=+1287.896134490" lastFinishedPulling="2025-12-04 15:24:58.14982433 +0000 UTC m=+1349.035867971" observedRunningTime="2025-12-04 15:24:59.276532461 +0000 UTC m=+1350.162576102" watchObservedRunningTime="2025-12-04 15:24:59.290095226 +0000 UTC m=+1350.176138867" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.420859 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-b7bc594d8-sjpg5"] Dec 04 15:24:59 crc kubenswrapper[4946]: E1204 15:24:59.421465 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79b3e881-2a15-43cf-aefa-b0b4dc1f5935" containerName="placement-db-sync" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.421488 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="79b3e881-2a15-43cf-aefa-b0b4dc1f5935" containerName="placement-db-sync" Dec 04 15:24:59 crc kubenswrapper[4946]: E1204 15:24:59.421504 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c94e50af-9ae2-4ed6-a351-ccff8209cd55" containerName="keystone-bootstrap" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.421512 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c94e50af-9ae2-4ed6-a351-ccff8209cd55" containerName="keystone-bootstrap" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.421733 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="c94e50af-9ae2-4ed6-a351-ccff8209cd55" containerName="keystone-bootstrap" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.421752 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="79b3e881-2a15-43cf-aefa-b0b4dc1f5935" containerName="placement-db-sync" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.433087 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.438455 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-6c965d6d44-d6246"] Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.441777 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.442684 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.442839 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.442945 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.443067 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.443371 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.443887 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-d8v6l" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.445634 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.445756 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.445859 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.445979 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.446079 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-v798j" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.456799 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-b7bc594d8-sjpg5"] Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.498192 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-combined-ca-bundle\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.498233 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-config-data\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.498269 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-internal-tls-certs\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.498297 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-scripts\") pod 
\"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.498317 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8177c201-11cb-42af-8a3f-85944e6558a3-logs\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.498346 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6pj5\" (UniqueName: \"kubernetes.io/projected/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-kube-api-access-w6pj5\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.498370 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8177c201-11cb-42af-8a3f-85944e6558a3-config-data\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.498393 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8177c201-11cb-42af-8a3f-85944e6558a3-internal-tls-certs\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.498432 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8177c201-11cb-42af-8a3f-85944e6558a3-scripts\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.498459 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-credential-keys\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.498476 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8177c201-11cb-42af-8a3f-85944e6558a3-combined-ca-bundle\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.498526 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhtp5\" (UniqueName: \"kubernetes.io/projected/8177c201-11cb-42af-8a3f-85944e6558a3-kube-api-access-vhtp5\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.498555 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: 
\"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-fernet-keys\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.498576 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-public-tls-certs\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.498606 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8177c201-11cb-42af-8a3f-85944e6558a3-public-tls-certs\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.507565 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6c965d6d44-d6246"] Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.601546 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-fernet-keys\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.601611 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-public-tls-certs\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.601676 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8177c201-11cb-42af-8a3f-85944e6558a3-public-tls-certs\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.601760 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-combined-ca-bundle\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.601909 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-config-data\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.602174 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-internal-tls-certs\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.602258 4946 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-scripts\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.602290 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8177c201-11cb-42af-8a3f-85944e6558a3-logs\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.602437 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6pj5\" (UniqueName: \"kubernetes.io/projected/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-kube-api-access-w6pj5\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.602516 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8177c201-11cb-42af-8a3f-85944e6558a3-config-data\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.602553 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8177c201-11cb-42af-8a3f-85944e6558a3-internal-tls-certs\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.602681 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8177c201-11cb-42af-8a3f-85944e6558a3-scripts\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.602848 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-credential-keys\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.602877 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8177c201-11cb-42af-8a3f-85944e6558a3-combined-ca-bundle\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.603134 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhtp5\" (UniqueName: \"kubernetes.io/projected/8177c201-11cb-42af-8a3f-85944e6558a3-kube-api-access-vhtp5\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.604429 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/8177c201-11cb-42af-8a3f-85944e6558a3-logs\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.607001 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-fernet-keys\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.610218 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-credential-keys\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.610265 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-internal-tls-certs\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.610520 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-config-data\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.610647 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-combined-ca-bundle\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.612641 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8177c201-11cb-42af-8a3f-85944e6558a3-config-data\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.613937 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8177c201-11cb-42af-8a3f-85944e6558a3-public-tls-certs\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.613987 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-scripts\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.614089 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8177c201-11cb-42af-8a3f-85944e6558a3-internal-tls-certs\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 
crc kubenswrapper[4946]: I1204 15:24:59.615349 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8177c201-11cb-42af-8a3f-85944e6558a3-combined-ca-bundle\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.616301 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-public-tls-certs\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.621045 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8177c201-11cb-42af-8a3f-85944e6558a3-scripts\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.627902 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhtp5\" (UniqueName: \"kubernetes.io/projected/8177c201-11cb-42af-8a3f-85944e6558a3-kube-api-access-vhtp5\") pod \"placement-6c965d6d44-d6246\" (UID: \"8177c201-11cb-42af-8a3f-85944e6558a3\") " pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.633392 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6pj5\" (UniqueName: \"kubernetes.io/projected/279e516e-61bc-4d5b-a3f9-34ecc6c5f47b-kube-api-access-w6pj5\") pod \"keystone-b7bc594d8-sjpg5\" (UID: \"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b\") " pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.801013 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:24:59 crc kubenswrapper[4946]: I1204 15:24:59.824533 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:25:00 crc kubenswrapper[4946]: I1204 15:25:00.311333 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"978d45f1-6556-4486-8175-29a7f68b263a","Type":"ContainerStarted","Data":"934b953f53f98d777b40fbcf75dc9881b567c4b6fbcf42480848bfe6f4d35af1"} Dec 04 15:25:00 crc kubenswrapper[4946]: I1204 15:25:00.330411 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"20d2aa22-a98c-483e-b74a-dd549ec45640","Type":"ContainerStarted","Data":"0c85f1b8b663d3558698ef8619bbbb0c2bbb53cc3a3c084c0f23d12eb3f52652"} Dec 04 15:25:00 crc kubenswrapper[4946]: I1204 15:25:00.376342 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.376312856 podStartE2EDuration="7.376312856s" podCreationTimestamp="2025-12-04 15:24:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:25:00.367108828 +0000 UTC m=+1351.253152479" watchObservedRunningTime="2025-12-04 15:25:00.376312856 +0000 UTC m=+1351.262356497" Dec 04 15:25:00 crc kubenswrapper[4946]: I1204 15:25:00.513733 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-b7bc594d8-sjpg5"] Dec 04 15:25:00 crc kubenswrapper[4946]: I1204 15:25:00.529032 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6c965d6d44-d6246"] Dec 04 15:25:00 crc kubenswrapper[4946]: W1204 15:25:00.560162 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8177c201_11cb_42af_8a3f_85944e6558a3.slice/crio-2d589b9b6c4cfeacb140c3ee5d4853ff86e628a60d1753566fbea2ab45cc4346 WatchSource:0}: Error finding container 2d589b9b6c4cfeacb140c3ee5d4853ff86e628a60d1753566fbea2ab45cc4346: Status 404 returned error can't find the container with id 2d589b9b6c4cfeacb140c3ee5d4853ff86e628a60d1753566fbea2ab45cc4346 Dec 04 15:25:00 crc kubenswrapper[4946]: I1204 15:25:00.875545 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-tvf2t" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.068296 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a2d2577-fc5e-4375-8c8f-154aa218707f-db-sync-config-data\") pod \"7a2d2577-fc5e-4375-8c8f-154aa218707f\" (UID: \"7a2d2577-fc5e-4375-8c8f-154aa218707f\") " Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.069042 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvw6r\" (UniqueName: \"kubernetes.io/projected/7a2d2577-fc5e-4375-8c8f-154aa218707f-kube-api-access-kvw6r\") pod \"7a2d2577-fc5e-4375-8c8f-154aa218707f\" (UID: \"7a2d2577-fc5e-4375-8c8f-154aa218707f\") " Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.069151 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a2d2577-fc5e-4375-8c8f-154aa218707f-combined-ca-bundle\") pod \"7a2d2577-fc5e-4375-8c8f-154aa218707f\" (UID: \"7a2d2577-fc5e-4375-8c8f-154aa218707f\") " Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.075648 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a2d2577-fc5e-4375-8c8f-154aa218707f-kube-api-access-kvw6r" (OuterVolumeSpecName: "kube-api-access-kvw6r") pod "7a2d2577-fc5e-4375-8c8f-154aa218707f" (UID: "7a2d2577-fc5e-4375-8c8f-154aa218707f"). InnerVolumeSpecName "kube-api-access-kvw6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.156051 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a2d2577-fc5e-4375-8c8f-154aa218707f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7a2d2577-fc5e-4375-8c8f-154aa218707f" (UID: "7a2d2577-fc5e-4375-8c8f-154aa218707f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.170815 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a2d2577-fc5e-4375-8c8f-154aa218707f-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "7a2d2577-fc5e-4375-8c8f-154aa218707f" (UID: "7a2d2577-fc5e-4375-8c8f-154aa218707f"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.184875 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvw6r\" (UniqueName: \"kubernetes.io/projected/7a2d2577-fc5e-4375-8c8f-154aa218707f-kube-api-access-kvw6r\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.184927 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a2d2577-fc5e-4375-8c8f-154aa218707f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.184941 4946 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a2d2577-fc5e-4375-8c8f-154aa218707f-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.373052 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-tvf2t" event={"ID":"7a2d2577-fc5e-4375-8c8f-154aa218707f","Type":"ContainerDied","Data":"0fba1fe3f01d482c1a37812162141a9e0c628b74d752c5e846727d0db86e926d"} Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.373106 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0fba1fe3f01d482c1a37812162141a9e0c628b74d752c5e846727d0db86e926d" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.373196 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-tvf2t" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.387526 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"978d45f1-6556-4486-8175-29a7f68b263a","Type":"ContainerStarted","Data":"7df9632d6581dd2761d69f8860814b2ebc9925d10e95483d83136bde92417db7"} Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.405835 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b7bc594d8-sjpg5" event={"ID":"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b","Type":"ContainerStarted","Data":"81b67d9822d62ec1c330863f68fcef818bb2616faa4c4ee924ba637526f0a85a"} Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.405935 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b7bc594d8-sjpg5" event={"ID":"279e516e-61bc-4d5b-a3f9-34ecc6c5f47b","Type":"ContainerStarted","Data":"a5c61206f724975d2e47237a29668db6fdfda8fafe878af268650fe5a572461b"} Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.406533 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.424582 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6c965d6d44-d6246" event={"ID":"8177c201-11cb-42af-8a3f-85944e6558a3","Type":"ContainerStarted","Data":"d7263107f17f2b15e5a87ec475c00459ec8d132b89dd1d6ecc191734b4255913"} Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.424662 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6c965d6d44-d6246" event={"ID":"8177c201-11cb-42af-8a3f-85944e6558a3","Type":"ContainerStarted","Data":"2d589b9b6c4cfeacb140c3ee5d4853ff86e628a60d1753566fbea2ab45cc4346"} Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.429175 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=7.429148626 
podStartE2EDuration="7.429148626s" podCreationTimestamp="2025-12-04 15:24:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:25:01.425176859 +0000 UTC m=+1352.311220520" watchObservedRunningTime="2025-12-04 15:25:01.429148626 +0000 UTC m=+1352.315192267" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.441785 4946 generic.go:334] "Generic (PLEG): container finished" podID="8fe7f895-e33e-4159-9dcd-689158d16f22" containerID="4a584f8c85227b7e4909b4169e6dd4224d00797c8adccb9ed321bf793d24a3f8" exitCode=0 Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.444217 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-cqwq4" event={"ID":"8fe7f895-e33e-4159-9dcd-689158d16f22","Type":"ContainerDied","Data":"4a584f8c85227b7e4909b4169e6dd4224d00797c8adccb9ed321bf793d24a3f8"} Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.478647 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-b7bc594d8-sjpg5" podStartSLOduration=2.478617538 podStartE2EDuration="2.478617538s" podCreationTimestamp="2025-12-04 15:24:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:25:01.460968973 +0000 UTC m=+1352.347012614" watchObservedRunningTime="2025-12-04 15:25:01.478617538 +0000 UTC m=+1352.364661179" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.571082 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5d869cc959-j4wsw"] Dec 04 15:25:01 crc kubenswrapper[4946]: E1204 15:25:01.572039 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a2d2577-fc5e-4375-8c8f-154aa218707f" containerName="barbican-db-sync" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.572064 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a2d2577-fc5e-4375-8c8f-154aa218707f" containerName="barbican-db-sync" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.572370 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a2d2577-fc5e-4375-8c8f-154aa218707f" containerName="barbican-db-sync" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.573624 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5d869cc959-j4wsw" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.577352 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-b5qmt" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.579872 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.580279 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.628294 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5d869cc959-j4wsw"] Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.705191 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-5dbfff5fc8-dg589"] Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.707363 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.710932 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.721088 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47433338-b9cd-4b5d-beaf-e551ca335c0e-config-data\") pod \"barbican-worker-5d869cc959-j4wsw\" (UID: \"47433338-b9cd-4b5d-beaf-e551ca335c0e\") " pod="openstack/barbican-worker-5d869cc959-j4wsw" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.721301 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47433338-b9cd-4b5d-beaf-e551ca335c0e-logs\") pod \"barbican-worker-5d869cc959-j4wsw\" (UID: \"47433338-b9cd-4b5d-beaf-e551ca335c0e\") " pod="openstack/barbican-worker-5d869cc959-j4wsw" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.721325 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47433338-b9cd-4b5d-beaf-e551ca335c0e-combined-ca-bundle\") pod \"barbican-worker-5d869cc959-j4wsw\" (UID: \"47433338-b9cd-4b5d-beaf-e551ca335c0e\") " pod="openstack/barbican-worker-5d869cc959-j4wsw" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.721354 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5565\" (UniqueName: \"kubernetes.io/projected/47433338-b9cd-4b5d-beaf-e551ca335c0e-kube-api-access-j5565\") pod \"barbican-worker-5d869cc959-j4wsw\" (UID: \"47433338-b9cd-4b5d-beaf-e551ca335c0e\") " pod="openstack/barbican-worker-5d869cc959-j4wsw" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.721381 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47433338-b9cd-4b5d-beaf-e551ca335c0e-config-data-custom\") pod \"barbican-worker-5d869cc959-j4wsw\" (UID: \"47433338-b9cd-4b5d-beaf-e551ca335c0e\") " pod="openstack/barbican-worker-5d869cc959-j4wsw" Dec 04 15:25:01 crc kubenswrapper[4946]: E1204 15:25:01.739823 4946 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a2d2577_fc5e_4375_8c8f_154aa218707f.slice/crio-0fba1fe3f01d482c1a37812162141a9e0c628b74d752c5e846727d0db86e926d\": RecentStats: unable to find data in memory cache]" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.787862 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5dbfff5fc8-dg589"] Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.824804 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47433338-b9cd-4b5d-beaf-e551ca335c0e-config-data-custom\") pod \"barbican-worker-5d869cc959-j4wsw\" (UID: \"47433338-b9cd-4b5d-beaf-e551ca335c0e\") " pod="openstack/barbican-worker-5d869cc959-j4wsw" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.824889 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2-logs\") pod \"barbican-keystone-listener-5dbfff5fc8-dg589\" (UID: \"71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2\") " pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.824933 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2-config-data-custom\") pod \"barbican-keystone-listener-5dbfff5fc8-dg589\" (UID: \"71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2\") " pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.825021 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l749j\" (UniqueName: \"kubernetes.io/projected/71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2-kube-api-access-l749j\") pod \"barbican-keystone-listener-5dbfff5fc8-dg589\" (UID: \"71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2\") " pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.825050 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47433338-b9cd-4b5d-beaf-e551ca335c0e-config-data\") pod \"barbican-worker-5d869cc959-j4wsw\" (UID: \"47433338-b9cd-4b5d-beaf-e551ca335c0e\") " pod="openstack/barbican-worker-5d869cc959-j4wsw" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.825097 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2-combined-ca-bundle\") pod \"barbican-keystone-listener-5dbfff5fc8-dg589\" (UID: \"71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2\") " pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.825184 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2-config-data\") pod \"barbican-keystone-listener-5dbfff5fc8-dg589\" (UID: \"71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2\") " pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.825225 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47433338-b9cd-4b5d-beaf-e551ca335c0e-logs\") pod \"barbican-worker-5d869cc959-j4wsw\" (UID: \"47433338-b9cd-4b5d-beaf-e551ca335c0e\") " pod="openstack/barbican-worker-5d869cc959-j4wsw" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.825247 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47433338-b9cd-4b5d-beaf-e551ca335c0e-combined-ca-bundle\") pod \"barbican-worker-5d869cc959-j4wsw\" (UID: \"47433338-b9cd-4b5d-beaf-e551ca335c0e\") " pod="openstack/barbican-worker-5d869cc959-j4wsw" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.825271 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5565\" (UniqueName: \"kubernetes.io/projected/47433338-b9cd-4b5d-beaf-e551ca335c0e-kube-api-access-j5565\") pod \"barbican-worker-5d869cc959-j4wsw\" (UID: \"47433338-b9cd-4b5d-beaf-e551ca335c0e\") " 
pod="openstack/barbican-worker-5d869cc959-j4wsw" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.826752 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47433338-b9cd-4b5d-beaf-e551ca335c0e-logs\") pod \"barbican-worker-5d869cc959-j4wsw\" (UID: \"47433338-b9cd-4b5d-beaf-e551ca335c0e\") " pod="openstack/barbican-worker-5d869cc959-j4wsw" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.836545 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47433338-b9cd-4b5d-beaf-e551ca335c0e-combined-ca-bundle\") pod \"barbican-worker-5d869cc959-j4wsw\" (UID: \"47433338-b9cd-4b5d-beaf-e551ca335c0e\") " pod="openstack/barbican-worker-5d869cc959-j4wsw" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.837063 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47433338-b9cd-4b5d-beaf-e551ca335c0e-config-data-custom\") pod \"barbican-worker-5d869cc959-j4wsw\" (UID: \"47433338-b9cd-4b5d-beaf-e551ca335c0e\") " pod="openstack/barbican-worker-5d869cc959-j4wsw" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.838161 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47433338-b9cd-4b5d-beaf-e551ca335c0e-config-data\") pod \"barbican-worker-5d869cc959-j4wsw\" (UID: \"47433338-b9cd-4b5d-beaf-e551ca335c0e\") " pod="openstack/barbican-worker-5d869cc959-j4wsw" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.846321 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-cnx2z"] Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.848334 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.860110 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5565\" (UniqueName: \"kubernetes.io/projected/47433338-b9cd-4b5d-beaf-e551ca335c0e-kube-api-access-j5565\") pod \"barbican-worker-5d869cc959-j4wsw\" (UID: \"47433338-b9cd-4b5d-beaf-e551ca335c0e\") " pod="openstack/barbican-worker-5d869cc959-j4wsw" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.899753 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-5d869cc959-j4wsw" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.900662 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-cnx2z"] Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.928085 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2-config-data\") pod \"barbican-keystone-listener-5dbfff5fc8-dg589\" (UID: \"71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2\") " pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.928203 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2-logs\") pod \"barbican-keystone-listener-5dbfff5fc8-dg589\" (UID: \"71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2\") " pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.928248 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2-config-data-custom\") pod \"barbican-keystone-listener-5dbfff5fc8-dg589\" (UID: \"71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2\") " pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.928330 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l749j\" (UniqueName: \"kubernetes.io/projected/71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2-kube-api-access-l749j\") pod \"barbican-keystone-listener-5dbfff5fc8-dg589\" (UID: \"71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2\") " pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.928394 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2-combined-ca-bundle\") pod \"barbican-keystone-listener-5dbfff5fc8-dg589\" (UID: \"71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2\") " pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.929798 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2-logs\") pod \"barbican-keystone-listener-5dbfff5fc8-dg589\" (UID: \"71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2\") " pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.937082 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2-config-data\") pod \"barbican-keystone-listener-5dbfff5fc8-dg589\" (UID: \"71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2\") " pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.937315 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2-combined-ca-bundle\") pod \"barbican-keystone-listener-5dbfff5fc8-dg589\" (UID: \"71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2\") " pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" Dec 04 15:25:01 
crc kubenswrapper[4946]: I1204 15:25:01.942349 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5c78d69c8-m42w5"] Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.950685 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2-config-data-custom\") pod \"barbican-keystone-listener-5dbfff5fc8-dg589\" (UID: \"71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2\") " pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.955073 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.958325 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 04 15:25:01 crc kubenswrapper[4946]: I1204 15:25:01.994027 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l749j\" (UniqueName: \"kubernetes.io/projected/71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2-kube-api-access-l749j\") pod \"barbican-keystone-listener-5dbfff5fc8-dg589\" (UID: \"71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2\") " pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.028702 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5c78d69c8-m42w5"] Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.031730 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-dns-svc\") pod \"dnsmasq-dns-85ff748b95-cnx2z\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.031866 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2lpq\" (UniqueName: \"kubernetes.io/projected/c2228f15-2ca3-4507-8fa3-ed675c6648c3-kube-api-access-p2lpq\") pod \"dnsmasq-dns-85ff748b95-cnx2z\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.031893 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-config\") pod \"dnsmasq-dns-85ff748b95-cnx2z\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.031939 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-cnx2z\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.031973 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-cnx2z\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 
crc kubenswrapper[4946]: I1204 15:25:02.032361 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-cnx2z\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.051198 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.139453 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-dns-svc\") pod \"dnsmasq-dns-85ff748b95-cnx2z\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.139820 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-config-data-custom\") pod \"barbican-api-5c78d69c8-m42w5\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.139892 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2lpq\" (UniqueName: \"kubernetes.io/projected/c2228f15-2ca3-4507-8fa3-ed675c6648c3-kube-api-access-p2lpq\") pod \"dnsmasq-dns-85ff748b95-cnx2z\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.139922 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-config\") pod \"dnsmasq-dns-85ff748b95-cnx2z\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.139975 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-cnx2z\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.140009 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-cnx2z\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.140049 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-config-data\") pod \"barbican-api-5c78d69c8-m42w5\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.140090 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-logs\") pod \"barbican-api-5c78d69c8-m42w5\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.140166 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-combined-ca-bundle\") pod \"barbican-api-5c78d69c8-m42w5\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.140216 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-cnx2z\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.140443 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrm4k\" (UniqueName: \"kubernetes.io/projected/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-kube-api-access-jrm4k\") pod \"barbican-api-5c78d69c8-m42w5\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.142051 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-cnx2z\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.142802 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-dns-svc\") pod \"dnsmasq-dns-85ff748b95-cnx2z\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.143132 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-config\") pod \"dnsmasq-dns-85ff748b95-cnx2z\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.148150 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-cnx2z\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.148325 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-cnx2z\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.164342 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2lpq\" (UniqueName: 
\"kubernetes.io/projected/c2228f15-2ca3-4507-8fa3-ed675c6648c3-kube-api-access-p2lpq\") pod \"dnsmasq-dns-85ff748b95-cnx2z\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.176086 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.244948 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrm4k\" (UniqueName: \"kubernetes.io/projected/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-kube-api-access-jrm4k\") pod \"barbican-api-5c78d69c8-m42w5\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.245037 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-config-data-custom\") pod \"barbican-api-5c78d69c8-m42w5\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.245095 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-config-data\") pod \"barbican-api-5c78d69c8-m42w5\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.245131 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-logs\") pod \"barbican-api-5c78d69c8-m42w5\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.245161 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-combined-ca-bundle\") pod \"barbican-api-5c78d69c8-m42w5\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.246822 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-logs\") pod \"barbican-api-5c78d69c8-m42w5\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.250489 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-combined-ca-bundle\") pod \"barbican-api-5c78d69c8-m42w5\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.250912 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-config-data\") pod \"barbican-api-5c78d69c8-m42w5\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.251234 4946 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-config-data-custom\") pod \"barbican-api-5c78d69c8-m42w5\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " pod="openstack/barbican-api-5c78d69c8-m42w5"
Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.269982 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrm4k\" (UniqueName: \"kubernetes.io/projected/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-kube-api-access-jrm4k\") pod \"barbican-api-5c78d69c8-m42w5\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " pod="openstack/barbican-api-5c78d69c8-m42w5"
Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.452894 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5c78d69c8-m42w5"
Dec 04 15:25:02 crc kubenswrapper[4946]: I1204 15:25:02.493324 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6c965d6d44-d6246" event={"ID":"8177c201-11cb-42af-8a3f-85944e6558a3","Type":"ContainerStarted","Data":"f2e918a0768d2516c171e3c068e76f01660e2775c839606b106da1320c432385"}
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.034335 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-6c965d6d44-d6246" podStartSLOduration=4.034305834 podStartE2EDuration="4.034305834s" podCreationTimestamp="2025-12-04 15:24:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:25:02.522760624 +0000 UTC m=+1353.408804265" watchObservedRunningTime="2025-12-04 15:25:03.034305834 +0000 UTC m=+1353.920349475"
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.044089 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5dbfff5fc8-dg589"]
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.086797 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5d869cc959-j4wsw"]
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.134378 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-cnx2z"]
Dec 04 15:25:03 crc kubenswrapper[4946]: W1204 15:25:03.134572 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2228f15_2ca3_4507_8fa3_ed675c6648c3.slice/crio-40efd4b3e80b95375c319b18e1309f1f4e14fa239f0cd2b681dbd2f8b2f23881 WatchSource:0}: Error finding container 40efd4b3e80b95375c319b18e1309f1f4e14fa239f0cd2b681dbd2f8b2f23881: Status 404 returned error can't find the container with id 40efd4b3e80b95375c319b18e1309f1f4e14fa239f0cd2b681dbd2f8b2f23881
Dec 04 15:25:03 crc kubenswrapper[4946]: W1204 15:25:03.136666 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e01116c_b2f6_4512_a06e_8c2bb9710fb3.slice/crio-01cdcdd681fb7bc4ba169c2bd7d6fce08220fa858f945630e45d81209e64527f WatchSource:0}: Error finding container 01cdcdd681fb7bc4ba169c2bd7d6fce08220fa858f945630e45d81209e64527f: Status 404 returned error can't find the container with id 01cdcdd681fb7bc4ba169c2bd7d6fce08220fa858f945630e45d81209e64527f
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.147005 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5c78d69c8-m42w5"]
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.327742 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-cqwq4"
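[editor's note] The two manager.go:1169 warnings above are cAdvisor racing CRI-O: the cgroup watch fires for a freshly created crio-<id> slice before the container is registered with the runtime, so the Status 404 lookups during pod startup are transient. The slice name itself encodes which pod is involved; a sketch (assuming the systemd cgroup naming seen here, where the pod UID's dashes become underscores) that recovers the pod UID and container ID:

    package main

    import (
        "fmt"
        "regexp"
        "strings"
    )

    func main() {
        // Path taken from the second warning above.
        path := "/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e01116c_b2f6_4512_a06e_8c2bb9710fb3.slice/crio-01cdcdd681fb7bc4ba169c2bd7d6fce08220fa858f945630e45d81209e64527f"
        re := regexp.MustCompile(`pod([0-9a-f_]+)\.slice/crio-([0-9a-f]+)$`)
        m := re.FindStringSubmatch(path)
        if m == nil {
            return
        }
        // systemd unit names cannot contain "-", so the UID is written with
        // underscores inside the slice name; restore the dashes.
        uid := strings.ReplaceAll(m[1], "_", "-")
        fmt.Println("pod UID:", uid)    // 8e01116c-b2f6-4512-a06e-8c2bb9710fb3 (barbican-api-5c78d69c8-m42w5)
        fmt.Println("container:", m[2]) // the crio sandbox/container ID
    }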
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.379273 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-scripts\") pod \"8fe7f895-e33e-4159-9dcd-689158d16f22\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") "
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.379345 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-combined-ca-bundle\") pod \"8fe7f895-e33e-4159-9dcd-689158d16f22\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") "
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.379534 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8fe7f895-e33e-4159-9dcd-689158d16f22-etc-machine-id\") pod \"8fe7f895-e33e-4159-9dcd-689158d16f22\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") "
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.379565 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bjrd\" (UniqueName: \"kubernetes.io/projected/8fe7f895-e33e-4159-9dcd-689158d16f22-kube-api-access-5bjrd\") pod \"8fe7f895-e33e-4159-9dcd-689158d16f22\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") "
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.379598 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-config-data\") pod \"8fe7f895-e33e-4159-9dcd-689158d16f22\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") "
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.379756 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-db-sync-config-data\") pod \"8fe7f895-e33e-4159-9dcd-689158d16f22\" (UID: \"8fe7f895-e33e-4159-9dcd-689158d16f22\") "
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.381296 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8fe7f895-e33e-4159-9dcd-689158d16f22-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "8fe7f895-e33e-4159-9dcd-689158d16f22" (UID: "8fe7f895-e33e-4159-9dcd-689158d16f22"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.402197 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "8fe7f895-e33e-4159-9dcd-689158d16f22" (UID: "8fe7f895-e33e-4159-9dcd-689158d16f22"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.406882 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fe7f895-e33e-4159-9dcd-689158d16f22-kube-api-access-5bjrd" (OuterVolumeSpecName: "kube-api-access-5bjrd") pod "8fe7f895-e33e-4159-9dcd-689158d16f22" (UID: "8fe7f895-e33e-4159-9dcd-689158d16f22").
InnerVolumeSpecName "kube-api-access-5bjrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.407507 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-scripts" (OuterVolumeSpecName: "scripts") pod "8fe7f895-e33e-4159-9dcd-689158d16f22" (UID: "8fe7f895-e33e-4159-9dcd-689158d16f22"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.486192 4946 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8fe7f895-e33e-4159-9dcd-689158d16f22-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.486658 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bjrd\" (UniqueName: \"kubernetes.io/projected/8fe7f895-e33e-4159-9dcd-689158d16f22-kube-api-access-5bjrd\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.486675 4946 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.486685 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.490355 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8fe7f895-e33e-4159-9dcd-689158d16f22" (UID: "8fe7f895-e33e-4159-9dcd-689158d16f22"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.522634 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-config-data" (OuterVolumeSpecName: "config-data") pod "8fe7f895-e33e-4159-9dcd-689158d16f22" (UID: "8fe7f895-e33e-4159-9dcd-689158d16f22"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.530648 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" event={"ID":"71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2","Type":"ContainerStarted","Data":"a185335230dcd19616937e05dd8e0256637f79393e8a8bc5f3f3128e0c2bf3f7"} Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.536257 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-cqwq4" event={"ID":"8fe7f895-e33e-4159-9dcd-689158d16f22","Type":"ContainerDied","Data":"62bee506dc90864125f6b59ac43c9865b746d3efeeeb0573076a29b2476c8078"} Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.536309 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="62bee506dc90864125f6b59ac43c9865b746d3efeeeb0573076a29b2476c8078" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.536383 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-cqwq4" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.543740 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5d869cc959-j4wsw" event={"ID":"47433338-b9cd-4b5d-beaf-e551ca335c0e","Type":"ContainerStarted","Data":"4c6a58743a3759769b0a8560f0803eb49235ca70585b103328bada9603efa819"} Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.551416 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" event={"ID":"c2228f15-2ca3-4507-8fa3-ed675c6648c3","Type":"ContainerStarted","Data":"40efd4b3e80b95375c319b18e1309f1f4e14fa239f0cd2b681dbd2f8b2f23881"} Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.553337 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c78d69c8-m42w5" event={"ID":"8e01116c-b2f6-4512-a06e-8c2bb9710fb3","Type":"ContainerStarted","Data":"01cdcdd681fb7bc4ba169c2bd7d6fce08220fa858f945630e45d81209e64527f"} Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.553568 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.553595 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.586625 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.588942 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.591843 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.591874 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fe7f895-e33e-4159-9dcd-689158d16f22-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.617956 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.629727 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.641659 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.669570 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.875511 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 04 15:25:03 crc kubenswrapper[4946]: E1204 15:25:03.876394 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fe7f895-e33e-4159-9dcd-689158d16f22" containerName="cinder-db-sync" Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.876408 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fe7f895-e33e-4159-9dcd-689158d16f22" containerName="cinder-db-sync" Dec 04 15:25:03 crc 
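[editor's note] The probe entries above show glance-default-external-api-0's startup probe reporting "unhealthy" twice and then "started" within the same second; kubelet only logs "SyncLoop (probe)" when a probe's result changes, and until a startup probe succeeds the pod's readiness and liveness probes are held off. A minimal sketch of that gating (my simplification, not kubelet's prober code):

    package main

    import "fmt"

    // podProbes tracks the coarse probe state visible in the log entries.
    type podProbes struct{ started, ready bool }

    func (p *podProbes) observe(probe, status string) {
        switch probe {
        case "startup":
            p.started = status == "started"
        case "readiness":
            // Readiness only takes effect once the startup probe has passed.
            p.ready = p.started && status == "ready"
        }
    }

    func main() {
        p := &podProbes{}
        p.observe("startup", "unhealthy") // 15:25:03.586625
        p.observe("startup", "started")   // 15:25:03.641659
        p.observe("readiness", "ready")
        fmt.Println(p.ready) // true
    }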
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.877913 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.889796 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.898214 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.901473 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " pod="openstack/cinder-scheduler-0"
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.903686 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrvbc\" (UniqueName: \"kubernetes.io/projected/9a2770ac-74ec-4a84-9f54-45602f47dd3a-kube-api-access-xrvbc\") pod \"cinder-scheduler-0\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " pod="openstack/cinder-scheduler-0"
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.898704 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.898741 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-wl6mt"
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.901964 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.926010 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9a2770ac-74ec-4a84-9f54-45602f47dd3a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " pod="openstack/cinder-scheduler-0"
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.926174 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-config-data\") pod \"cinder-scheduler-0\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " pod="openstack/cinder-scheduler-0"
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.926225 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " pod="openstack/cinder-scheduler-0"
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204 15:25:03.926358 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-scripts\") pod \"cinder-scheduler-0\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " pod="openstack/cinder-scheduler-0"
Dec 04 15:25:03 crc kubenswrapper[4946]: I1204
15:25:03.966871 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-cnx2z"] Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.030989 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-config-data\") pod \"cinder-scheduler-0\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.031342 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.031545 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-scripts\") pod \"cinder-scheduler-0\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.031771 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.032054 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrvbc\" (UniqueName: \"kubernetes.io/projected/9a2770ac-74ec-4a84-9f54-45602f47dd3a-kube-api-access-xrvbc\") pod \"cinder-scheduler-0\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.033577 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9a2770ac-74ec-4a84-9f54-45602f47dd3a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.033945 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9a2770ac-74ec-4a84-9f54-45602f47dd3a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.038209 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-msmr9"] Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.040415 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.046934 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.050383 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-config-data\") pod \"cinder-scheduler-0\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.055398 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-scripts\") pod \"cinder-scheduler-0\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.072194 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.084426 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-msmr9"] Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.122705 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrvbc\" (UniqueName: \"kubernetes.io/projected/9a2770ac-74ec-4a84-9f54-45602f47dd3a-kube-api-access-xrvbc\") pod \"cinder-scheduler-0\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.140151 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pb2zf\" (UniqueName: \"kubernetes.io/projected/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-kube-api-access-pb2zf\") pod \"dnsmasq-dns-5c9776ccc5-msmr9\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.140245 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-msmr9\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.140363 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-msmr9\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.140606 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-dns-swift-storage-0\") pod 
\"dnsmasq-dns-5c9776ccc5-msmr9\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.140708 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-msmr9\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.140743 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-config\") pod \"dnsmasq-dns-5c9776ccc5-msmr9\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.162284 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.188528 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.200715 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.256555 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.260422 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-msmr9\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.260573 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-logs\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.260624 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbb9w\" (UniqueName: \"kubernetes.io/projected/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-kube-api-access-fbb9w\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.260711 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-msmr9\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.260765 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-config\") pod \"dnsmasq-dns-5c9776ccc5-msmr9\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.262154 4946 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.262936 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.263072 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.263291 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-msmr9\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.263923 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pb2zf\" (UniqueName: \"kubernetes.io/projected/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-kube-api-access-pb2zf\") pod \"dnsmasq-dns-5c9776ccc5-msmr9\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.264007 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-scripts\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.264215 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-msmr9\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.264394 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-msmr9\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.264432 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-config-data\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.264563 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-config-data-custom\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 
15:25:04.264782 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-msmr9\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.265845 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-msmr9\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.265920 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-msmr9\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.272758 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-config\") pod \"dnsmasq-dns-5c9776ccc5-msmr9\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.306633 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pb2zf\" (UniqueName: \"kubernetes.io/projected/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-kube-api-access-pb2zf\") pod \"dnsmasq-dns-5c9776ccc5-msmr9\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.371541 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-scripts\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.372300 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-config-data\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.372397 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-config-data-custom\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.372562 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-logs\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.372603 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbb9w\" (UniqueName: \"kubernetes.io/projected/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-kube-api-access-fbb9w\") pod \"cinder-api-0\" (UID: 
\"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.372697 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.372735 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.374433 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-logs\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.375685 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.384323 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-config-data-custom\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.385690 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-config-data\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.385932 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.404964 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbb9w\" (UniqueName: \"kubernetes.io/projected/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-kube-api-access-fbb9w\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.410695 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-scripts\") pod \"cinder-api-0\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.575366 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.591361 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.609835 4946 generic.go:334] "Generic (PLEG): container finished" podID="c2228f15-2ca3-4507-8fa3-ed675c6648c3" containerID="8aa5b40bab0f438b78de5d0401e8c231675292206f08cc374be12017223c78de" exitCode=0 Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.609986 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" event={"ID":"c2228f15-2ca3-4507-8fa3-ed675c6648c3","Type":"ContainerDied","Data":"8aa5b40bab0f438b78de5d0401e8c231675292206f08cc374be12017223c78de"} Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.623656 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c78d69c8-m42w5" event={"ID":"8e01116c-b2f6-4512-a06e-8c2bb9710fb3","Type":"ContainerStarted","Data":"c5b65fdc664524ce0d0ef7529d4feff4767b17aa21281ae9efef589cf3366a2c"} Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.623725 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c78d69c8-m42w5" event={"ID":"8e01116c-b2f6-4512-a06e-8c2bb9710fb3","Type":"ContainerStarted","Data":"79bd221dcfa9dfd38a6d062623f7ca01093e48f041e6ec5341e9b5d2be32fded"} Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.624493 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.624587 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.624604 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.624619 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.643627 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.644133 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.739196 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.764308 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 04 15:25:04 crc kubenswrapper[4946]: I1204 15:25:04.806753 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5c78d69c8-m42w5" podStartSLOduration=3.806726608 podStartE2EDuration="3.806726608s" podCreationTimestamp="2025-12-04 15:25:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:25:04.70656144 +0000 UTC m=+1355.592605091" watchObservedRunningTime="2025-12-04 15:25:04.806726608 +0000 UTC m=+1355.692770249" Dec 04 15:25:05 crc kubenswrapper[4946]: I1204 15:25:05.041268 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 04 15:25:05 crc kubenswrapper[4946]: I1204 15:25:05.656884 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-scheduler-0" event={"ID":"9a2770ac-74ec-4a84-9f54-45602f47dd3a","Type":"ContainerStarted","Data":"81594ae692c95b4fc3455df607ee0b1d7f8ff881280f8da20268f7e1fe4e415c"} Dec 04 15:25:05 crc kubenswrapper[4946]: I1204 15:25:05.659292 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 04 15:25:05 crc kubenswrapper[4946]: I1204 15:25:05.659986 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 04 15:25:05 crc kubenswrapper[4946]: I1204 15:25:05.703073 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 04 15:25:05 crc kubenswrapper[4946]: I1204 15:25:05.879089 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-msmr9"] Dec 04 15:25:05 crc kubenswrapper[4946]: I1204 15:25:05.933749 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-9c7bc6557-kqv86" Dec 04 15:25:06 crc kubenswrapper[4946]: I1204 15:25:06.016240 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6bf8c9bfd6-lcqwj"] Dec 04 15:25:06 crc kubenswrapper[4946]: I1204 15:25:06.016529 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6bf8c9bfd6-lcqwj" podUID="c5b6c82d-825a-448c-a03c-53dee6f4bfc0" containerName="neutron-api" containerID="cri-o://6db82a66a2b56e240fabd9acf126caf510fd23306360ac6609d8db56861245de" gracePeriod=30 Dec 04 15:25:06 crc kubenswrapper[4946]: I1204 15:25:06.017184 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6bf8c9bfd6-lcqwj" podUID="c5b6c82d-825a-448c-a03c-53dee6f4bfc0" containerName="neutron-httpd" containerID="cri-o://50743d59f9fc4173f74e2772198eda1df47511c19225da642f2e66a708a571f1" gracePeriod=30 Dec 04 15:25:06 crc kubenswrapper[4946]: I1204 15:25:06.692848 4946 generic.go:334] "Generic (PLEG): container finished" podID="c5b6c82d-825a-448c-a03c-53dee6f4bfc0" containerID="50743d59f9fc4173f74e2772198eda1df47511c19225da642f2e66a708a571f1" exitCode=0 Dec 04 15:25:06 crc kubenswrapper[4946]: I1204 15:25:06.693258 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6bf8c9bfd6-lcqwj" event={"ID":"c5b6c82d-825a-448c-a03c-53dee6f4bfc0","Type":"ContainerDied","Data":"50743d59f9fc4173f74e2772198eda1df47511c19225da642f2e66a708a571f1"} Dec 04 15:25:06 crc kubenswrapper[4946]: I1204 15:25:06.720580 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" podUID="c2228f15-2ca3-4507-8fa3-ed675c6648c3" containerName="dnsmasq-dns" containerID="cri-o://f555729b14aa7a8cce762cd89a0a878f371d03d68c9fb4c4451314c4dc9dcef5" gracePeriod=10 Dec 04 15:25:06 crc kubenswrapper[4946]: I1204 15:25:06.720891 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" event={"ID":"c2228f15-2ca3-4507-8fa3-ed675c6648c3","Type":"ContainerStarted","Data":"f555729b14aa7a8cce762cd89a0a878f371d03d68c9fb4c4451314c4dc9dcef5"} Dec 04 15:25:06 crc kubenswrapper[4946]: I1204 15:25:06.721454 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:06 crc kubenswrapper[4946]: I1204 15:25:06.765857 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" podStartSLOduration=5.765832851 
podStartE2EDuration="5.765832851s" podCreationTimestamp="2025-12-04 15:25:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:25:06.752845341 +0000 UTC m=+1357.638889012" watchObservedRunningTime="2025-12-04 15:25:06.765832851 +0000 UTC m=+1357.651876492" Dec 04 15:25:07 crc kubenswrapper[4946]: I1204 15:25:07.847722 4946 generic.go:334] "Generic (PLEG): container finished" podID="c2228f15-2ca3-4507-8fa3-ed675c6648c3" containerID="f555729b14aa7a8cce762cd89a0a878f371d03d68c9fb4c4451314c4dc9dcef5" exitCode=0 Dec 04 15:25:07 crc kubenswrapper[4946]: I1204 15:25:07.848324 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" event={"ID":"c2228f15-2ca3-4507-8fa3-ed675c6648c3","Type":"ContainerDied","Data":"f555729b14aa7a8cce762cd89a0a878f371d03d68c9fb4c4451314c4dc9dcef5"} Dec 04 15:25:07 crc kubenswrapper[4946]: I1204 15:25:07.890160 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.458270 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-d6b8cfb46-xzwxx"] Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.461221 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.471125 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.471458 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.494335 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-d6b8cfb46-xzwxx"] Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.589741 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a807e28-4c6a-435c-b640-a11ae6770632-logs\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.589985 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a807e28-4c6a-435c-b640-a11ae6770632-config-data\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.590042 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a807e28-4c6a-435c-b640-a11ae6770632-public-tls-certs\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.590071 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xbsx\" (UniqueName: \"kubernetes.io/projected/6a807e28-4c6a-435c-b640-a11ae6770632-kube-api-access-9xbsx\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 
15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.590130 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a807e28-4c6a-435c-b640-a11ae6770632-combined-ca-bundle\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.590167 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a807e28-4c6a-435c-b640-a11ae6770632-internal-tls-certs\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.590198 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6a807e28-4c6a-435c-b640-a11ae6770632-config-data-custom\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.693644 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a807e28-4c6a-435c-b640-a11ae6770632-combined-ca-bundle\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.693750 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a807e28-4c6a-435c-b640-a11ae6770632-internal-tls-certs\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.693798 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6a807e28-4c6a-435c-b640-a11ae6770632-config-data-custom\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.693920 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a807e28-4c6a-435c-b640-a11ae6770632-logs\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.693969 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a807e28-4c6a-435c-b640-a11ae6770632-config-data\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.694033 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a807e28-4c6a-435c-b640-a11ae6770632-public-tls-certs\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" 
Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.694071 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xbsx\" (UniqueName: \"kubernetes.io/projected/6a807e28-4c6a-435c-b640-a11ae6770632-kube-api-access-9xbsx\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.695691 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a807e28-4c6a-435c-b640-a11ae6770632-logs\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.706051 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6a807e28-4c6a-435c-b640-a11ae6770632-config-data-custom\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.707063 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a807e28-4c6a-435c-b640-a11ae6770632-config-data\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.707918 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a807e28-4c6a-435c-b640-a11ae6770632-combined-ca-bundle\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.710898 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a807e28-4c6a-435c-b640-a11ae6770632-internal-tls-certs\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.724647 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xbsx\" (UniqueName: \"kubernetes.io/projected/6a807e28-4c6a-435c-b640-a11ae6770632-kube-api-access-9xbsx\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.726621 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a807e28-4c6a-435c-b640-a11ae6770632-public-tls-certs\") pod \"barbican-api-d6b8cfb46-xzwxx\" (UID: \"6a807e28-4c6a-435c-b640-a11ae6770632\") " pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.787048 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.881215 4946 generic.go:334] "Generic (PLEG): container finished" podID="c6df584d-65d7-4829-8937-3ac0ab49b71b" containerID="fa591cb12a326a0be71c8f464fcc92ed7afe15e10f7e6befabefbc25961efe9b" exitCode=0 Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.881358 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-w5njq" event={"ID":"c6df584d-65d7-4829-8937-3ac0ab49b71b","Type":"ContainerDied","Data":"fa591cb12a326a0be71c8f464fcc92ed7afe15e10f7e6befabefbc25961efe9b"} Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.890353 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" event={"ID":"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2","Type":"ContainerStarted","Data":"1a848226f03d18b8add738482f7fdcec3a7d584c40d0cbae96a5aa13f3ee7843"} Dec 04 15:25:08 crc kubenswrapper[4946]: I1204 15:25:08.897383 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd","Type":"ContainerStarted","Data":"fa0b47d8979f8341a965e00afe7ff7f4c1576e976667c13aa405159dbc6c1425"} Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.062464 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.106491 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-dns-svc\") pod \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.106610 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p2lpq\" (UniqueName: \"kubernetes.io/projected/c2228f15-2ca3-4507-8fa3-ed675c6648c3-kube-api-access-p2lpq\") pod \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.106821 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-config\") pod \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.106883 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-ovsdbserver-sb\") pod \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.106913 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-dns-swift-storage-0\") pod \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") " Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.106966 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-ovsdbserver-nb\") pod \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\" (UID: \"c2228f15-2ca3-4507-8fa3-ed675c6648c3\") 
" Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.132909 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2228f15-2ca3-4507-8fa3-ed675c6648c3-kube-api-access-p2lpq" (OuterVolumeSpecName: "kube-api-access-p2lpq") pod "c2228f15-2ca3-4507-8fa3-ed675c6648c3" (UID: "c2228f15-2ca3-4507-8fa3-ed675c6648c3"). InnerVolumeSpecName "kube-api-access-p2lpq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.197508 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c2228f15-2ca3-4507-8fa3-ed675c6648c3" (UID: "c2228f15-2ca3-4507-8fa3-ed675c6648c3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.210385 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p2lpq\" (UniqueName: \"kubernetes.io/projected/c2228f15-2ca3-4507-8fa3-ed675c6648c3-kube-api-access-p2lpq\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.210420 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.225566 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c2228f15-2ca3-4507-8fa3-ed675c6648c3" (UID: "c2228f15-2ca3-4507-8fa3-ed675c6648c3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.262733 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-config" (OuterVolumeSpecName: "config") pod "c2228f15-2ca3-4507-8fa3-ed675c6648c3" (UID: "c2228f15-2ca3-4507-8fa3-ed675c6648c3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.273026 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c2228f15-2ca3-4507-8fa3-ed675c6648c3" (UID: "c2228f15-2ca3-4507-8fa3-ed675c6648c3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.287731 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c2228f15-2ca3-4507-8fa3-ed675c6648c3" (UID: "c2228f15-2ca3-4507-8fa3-ed675c6648c3"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.312080 4946 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.312135 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.312149 4946 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:09 crc kubenswrapper[4946]: I1204 15:25:09.312162 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2228f15-2ca3-4507-8fa3-ed675c6648c3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.112341 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.112587 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-cnx2z" event={"ID":"c2228f15-2ca3-4507-8fa3-ed675c6648c3","Type":"ContainerDied","Data":"40efd4b3e80b95375c319b18e1309f1f4e14fa239f0cd2b681dbd2f8b2f23881"} Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.114482 4946 scope.go:117] "RemoveContainer" containerID="f555729b14aa7a8cce762cd89a0a878f371d03d68c9fb4c4451314c4dc9dcef5" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.154629 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-d6b8cfb46-xzwxx"] Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.206849 4946 scope.go:117] "RemoveContainer" containerID="8aa5b40bab0f438b78de5d0401e8c231675292206f08cc374be12017223c78de" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.244969 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-cnx2z"] Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.286191 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-cnx2z"] Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.529724 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.530048 4946 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.622304 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.715799 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/c6df584d-65d7-4829-8937-3ac0ab49b71b-certs\") pod \"c6df584d-65d7-4829-8937-3ac0ab49b71b\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.715860 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6df584d-65d7-4829-8937-3ac0ab49b71b-combined-ca-bundle\") pod \"c6df584d-65d7-4829-8937-3ac0ab49b71b\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.715954 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-td52c\" (UniqueName: \"kubernetes.io/projected/c6df584d-65d7-4829-8937-3ac0ab49b71b-kube-api-access-td52c\") pod \"c6df584d-65d7-4829-8937-3ac0ab49b71b\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.716012 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6df584d-65d7-4829-8937-3ac0ab49b71b-scripts\") pod \"c6df584d-65d7-4829-8937-3ac0ab49b71b\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.716050 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6df584d-65d7-4829-8937-3ac0ab49b71b-config-data\") pod \"c6df584d-65d7-4829-8937-3ac0ab49b71b\" (UID: \"c6df584d-65d7-4829-8937-3ac0ab49b71b\") " Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.722371 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6df584d-65d7-4829-8937-3ac0ab49b71b-scripts" (OuterVolumeSpecName: "scripts") pod "c6df584d-65d7-4829-8937-3ac0ab49b71b" (UID: "c6df584d-65d7-4829-8937-3ac0ab49b71b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.723363 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6df584d-65d7-4829-8937-3ac0ab49b71b-certs" (OuterVolumeSpecName: "certs") pod "c6df584d-65d7-4829-8937-3ac0ab49b71b" (UID: "c6df584d-65d7-4829-8937-3ac0ab49b71b"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.723487 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6df584d-65d7-4829-8937-3ac0ab49b71b-kube-api-access-td52c" (OuterVolumeSpecName: "kube-api-access-td52c") pod "c6df584d-65d7-4829-8937-3ac0ab49b71b" (UID: "c6df584d-65d7-4829-8937-3ac0ab49b71b"). InnerVolumeSpecName "kube-api-access-td52c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.742847 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.742996 4946 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.765863 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6df584d-65d7-4829-8937-3ac0ab49b71b-config-data" (OuterVolumeSpecName: "config-data") pod "c6df584d-65d7-4829-8937-3ac0ab49b71b" (UID: "c6df584d-65d7-4829-8937-3ac0ab49b71b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.771159 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.819706 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-td52c\" (UniqueName: \"kubernetes.io/projected/c6df584d-65d7-4829-8937-3ac0ab49b71b-kube-api-access-td52c\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.820247 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6df584d-65d7-4829-8937-3ac0ab49b71b-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.820263 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6df584d-65d7-4829-8937-3ac0ab49b71b-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.820277 4946 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/c6df584d-65d7-4829-8937-3ac0ab49b71b-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.855318 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6df584d-65d7-4829-8937-3ac0ab49b71b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c6df584d-65d7-4829-8937-3ac0ab49b71b" (UID: "c6df584d-65d7-4829-8937-3ac0ab49b71b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:10 crc kubenswrapper[4946]: I1204 15:25:10.921948 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6df584d-65d7-4829-8937-3ac0ab49b71b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.031967 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.076005 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-storageinit-7qkz8"] Dec 04 15:25:11 crc kubenswrapper[4946]: E1204 15:25:11.078000 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6df584d-65d7-4829-8937-3ac0ab49b71b" containerName="cloudkitty-db-sync" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.078031 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6df584d-65d7-4829-8937-3ac0ab49b71b" containerName="cloudkitty-db-sync" Dec 04 15:25:11 crc kubenswrapper[4946]: E1204 15:25:11.078059 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2228f15-2ca3-4507-8fa3-ed675c6648c3" containerName="init" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.078068 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2228f15-2ca3-4507-8fa3-ed675c6648c3" containerName="init" Dec 04 15:25:11 crc kubenswrapper[4946]: E1204 15:25:11.078150 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2228f15-2ca3-4507-8fa3-ed675c6648c3" containerName="dnsmasq-dns" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.078161 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2228f15-2ca3-4507-8fa3-ed675c6648c3" containerName="dnsmasq-dns" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.078442 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6df584d-65d7-4829-8937-3ac0ab49b71b" containerName="cloudkitty-db-sync" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.078472 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2228f15-2ca3-4507-8fa3-ed675c6648c3" containerName="dnsmasq-dns" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.079586 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.183940 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-7qkz8"] Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.207747 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" event={"ID":"71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2","Type":"ContainerStarted","Data":"c39878a6833cdb5d8c04b1831cf482f7674e82914a8e97c1ba0ca68796d4a27a"} Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.219666 4946 generic.go:334] "Generic (PLEG): container finished" podID="e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2" containerID="ce0faf47d27ebd0d8e26634dcc9ecd904c2dc788a92ec2518a8af894edcd4482" exitCode=0 Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.219741 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" event={"ID":"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2","Type":"ContainerDied","Data":"ce0faf47d27ebd0d8e26634dcc9ecd904c2dc788a92ec2518a8af894edcd4482"} Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.233934 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5d869cc959-j4wsw" event={"ID":"47433338-b9cd-4b5d-beaf-e551ca335c0e","Type":"ContainerStarted","Data":"2a28e18fe8e1f7a097d92bef170412dcb3d1a43cc1824322ec92585da39ed49c"} Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.235230 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4277efcb-7f69-4cb2-9999-09d884c5b706-certs\") pod \"cloudkitty-storageinit-7qkz8\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.235444 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4277efcb-7f69-4cb2-9999-09d884c5b706-config-data\") pod \"cloudkitty-storageinit-7qkz8\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.235470 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n98s7\" (UniqueName: \"kubernetes.io/projected/4277efcb-7f69-4cb2-9999-09d884c5b706-kube-api-access-n98s7\") pod \"cloudkitty-storageinit-7qkz8\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.235520 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4277efcb-7f69-4cb2-9999-09d884c5b706-scripts\") pod \"cloudkitty-storageinit-7qkz8\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.235583 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4277efcb-7f69-4cb2-9999-09d884c5b706-combined-ca-bundle\") pod \"cloudkitty-storageinit-7qkz8\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.248132 4946 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-w5njq" event={"ID":"c6df584d-65d7-4829-8937-3ac0ab49b71b","Type":"ContainerDied","Data":"c5e7a94d74809ddde5b8b8e89b28964dc3e46b551c53d8f9eeed6649226bd260"} Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.248188 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5e7a94d74809ddde5b8b8e89b28964dc3e46b551c53d8f9eeed6649226bd260" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.248268 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-w5njq" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.264692 4946 generic.go:334] "Generic (PLEG): container finished" podID="c5b6c82d-825a-448c-a03c-53dee6f4bfc0" containerID="6db82a66a2b56e240fabd9acf126caf510fd23306360ac6609d8db56861245de" exitCode=0 Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.264765 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6bf8c9bfd6-lcqwj" event={"ID":"c5b6c82d-825a-448c-a03c-53dee6f4bfc0","Type":"ContainerDied","Data":"6db82a66a2b56e240fabd9acf126caf510fd23306360ac6609d8db56861245de"} Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.275692 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-d6b8cfb46-xzwxx" event={"ID":"6a807e28-4c6a-435c-b640-a11ae6770632","Type":"ContainerStarted","Data":"d1fcfdc05867bb729e0ba820d865ff91c26a21d7bc4adc2bf5eed8f99dca9dd9"} Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.275768 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-d6b8cfb46-xzwxx" event={"ID":"6a807e28-4c6a-435c-b640-a11ae6770632","Type":"ContainerStarted","Data":"f08f7500f18d32caaf68308823edcfcf5a797c44cac7da62ea9ff3303afc52d4"} Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.341679 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4277efcb-7f69-4cb2-9999-09d884c5b706-certs\") pod \"cloudkitty-storageinit-7qkz8\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.342485 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4277efcb-7f69-4cb2-9999-09d884c5b706-config-data\") pod \"cloudkitty-storageinit-7qkz8\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.342521 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n98s7\" (UniqueName: \"kubernetes.io/projected/4277efcb-7f69-4cb2-9999-09d884c5b706-kube-api-access-n98s7\") pod \"cloudkitty-storageinit-7qkz8\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.342563 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4277efcb-7f69-4cb2-9999-09d884c5b706-scripts\") pod \"cloudkitty-storageinit-7qkz8\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.342638 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4277efcb-7f69-4cb2-9999-09d884c5b706-combined-ca-bundle\") pod \"cloudkitty-storageinit-7qkz8\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.349277 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4277efcb-7f69-4cb2-9999-09d884c5b706-certs\") pod \"cloudkitty-storageinit-7qkz8\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.349529 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4277efcb-7f69-4cb2-9999-09d884c5b706-combined-ca-bundle\") pod \"cloudkitty-storageinit-7qkz8\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.349793 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4277efcb-7f69-4cb2-9999-09d884c5b706-scripts\") pod \"cloudkitty-storageinit-7qkz8\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.350897 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4277efcb-7f69-4cb2-9999-09d884c5b706-config-data\") pod \"cloudkitty-storageinit-7qkz8\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.361174 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n98s7\" (UniqueName: \"kubernetes.io/projected/4277efcb-7f69-4cb2-9999-09d884c5b706-kube-api-access-n98s7\") pod \"cloudkitty-storageinit-7qkz8\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.427743 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:11 crc kubenswrapper[4946]: I1204 15:25:11.473012 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2228f15-2ca3-4507-8fa3-ed675c6648c3" path="/var/lib/kubelet/pods/c2228f15-2ca3-4507-8fa3-ed675c6648c3/volumes" Dec 04 15:25:12 crc kubenswrapper[4946]: I1204 15:25:12.295577 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9a2770ac-74ec-4a84-9f54-45602f47dd3a","Type":"ContainerStarted","Data":"fb52bd5d1ea1de828858d205cb57cefe2050c99e6f47135ad23e3e1e41ef6787"} Dec 04 15:25:12 crc kubenswrapper[4946]: I1204 15:25:12.298440 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd","Type":"ContainerStarted","Data":"0a2472f31a8e5c8d5dc9b0437835174fe608db8fe2f9b004645aea35a3c5ba71"} Dec 04 15:25:14 crc kubenswrapper[4946]: I1204 15:25:14.448509 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:14 crc kubenswrapper[4946]: I1204 15:25:14.813312 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:17 crc kubenswrapper[4946]: I1204 15:25:17.601023 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:25:17 crc kubenswrapper[4946]: I1204 15:25:17.732940 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-config\") pod \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " Dec 04 15:25:17 crc kubenswrapper[4946]: I1204 15:25:17.733224 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-httpd-config\") pod \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " Dec 04 15:25:17 crc kubenswrapper[4946]: I1204 15:25:17.733310 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmkqd\" (UniqueName: \"kubernetes.io/projected/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-kube-api-access-gmkqd\") pod \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " Dec 04 15:25:17 crc kubenswrapper[4946]: I1204 15:25:17.734305 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-combined-ca-bundle\") pod \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " Dec 04 15:25:17 crc kubenswrapper[4946]: I1204 15:25:17.734423 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-ovndb-tls-certs\") pod \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\" (UID: \"c5b6c82d-825a-448c-a03c-53dee6f4bfc0\") " Dec 04 15:25:17 crc kubenswrapper[4946]: I1204 15:25:17.741789 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-kube-api-access-gmkqd" (OuterVolumeSpecName: "kube-api-access-gmkqd") pod "c5b6c82d-825a-448c-a03c-53dee6f4bfc0" 
(UID: "c5b6c82d-825a-448c-a03c-53dee6f4bfc0"). InnerVolumeSpecName "kube-api-access-gmkqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:17 crc kubenswrapper[4946]: I1204 15:25:17.742159 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "c5b6c82d-825a-448c-a03c-53dee6f4bfc0" (UID: "c5b6c82d-825a-448c-a03c-53dee6f4bfc0"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:17 crc kubenswrapper[4946]: I1204 15:25:17.800970 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-config" (OuterVolumeSpecName: "config") pod "c5b6c82d-825a-448c-a03c-53dee6f4bfc0" (UID: "c5b6c82d-825a-448c-a03c-53dee6f4bfc0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:17 crc kubenswrapper[4946]: I1204 15:25:17.802843 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c5b6c82d-825a-448c-a03c-53dee6f4bfc0" (UID: "c5b6c82d-825a-448c-a03c-53dee6f4bfc0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:17 crc kubenswrapper[4946]: I1204 15:25:17.838084 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:17 crc kubenswrapper[4946]: I1204 15:25:17.838139 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:17 crc kubenswrapper[4946]: I1204 15:25:17.838151 4946 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:17 crc kubenswrapper[4946]: I1204 15:25:17.838161 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmkqd\" (UniqueName: \"kubernetes.io/projected/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-kube-api-access-gmkqd\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:17 crc kubenswrapper[4946]: I1204 15:25:17.840553 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "c5b6c82d-825a-448c-a03c-53dee6f4bfc0" (UID: "c5b6c82d-825a-448c-a03c-53dee6f4bfc0"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:17 crc kubenswrapper[4946]: I1204 15:25:17.940816 4946 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5b6c82d-825a-448c-a03c-53dee6f4bfc0-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:18 crc kubenswrapper[4946]: I1204 15:25:18.400650 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6bf8c9bfd6-lcqwj" event={"ID":"c5b6c82d-825a-448c-a03c-53dee6f4bfc0","Type":"ContainerDied","Data":"30d4b5b81ad6772a3397a56af948411e43e55872a9ce5ef8806c6113440f0186"} Dec 04 15:25:18 crc kubenswrapper[4946]: I1204 15:25:18.400729 4946 scope.go:117] "RemoveContainer" containerID="50743d59f9fc4173f74e2772198eda1df47511c19225da642f2e66a708a571f1" Dec 04 15:25:18 crc kubenswrapper[4946]: I1204 15:25:18.401338 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6bf8c9bfd6-lcqwj" Dec 04 15:25:18 crc kubenswrapper[4946]: I1204 15:25:18.457103 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6bf8c9bfd6-lcqwj"] Dec 04 15:25:18 crc kubenswrapper[4946]: I1204 15:25:18.470170 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-6bf8c9bfd6-lcqwj"] Dec 04 15:25:18 crc kubenswrapper[4946]: I1204 15:25:18.801578 4946 scope.go:117] "RemoveContainer" containerID="6db82a66a2b56e240fabd9acf126caf510fd23306360ac6609d8db56861245de" Dec 04 15:25:19 crc kubenswrapper[4946]: I1204 15:25:19.175432 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-7qkz8"] Dec 04 15:25:19 crc kubenswrapper[4946]: I1204 15:25:19.438261 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-7qkz8" event={"ID":"4277efcb-7f69-4cb2-9999-09d884c5b706","Type":"ContainerStarted","Data":"6a9e76f6f834f3bf0e248b485368ba590e842d744d5fc97c0d7aa00ecf1353c6"} Dec 04 15:25:19 crc kubenswrapper[4946]: I1204 15:25:19.483429 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5b6c82d-825a-448c-a03c-53dee6f4bfc0" path="/var/lib/kubelet/pods/c5b6c82d-825a-448c-a03c-53dee6f4bfc0/volumes" Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.492771 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9a2770ac-74ec-4a84-9f54-45602f47dd3a","Type":"ContainerStarted","Data":"524ed05af88c7256deb0ddf998a5a9b4e0a80e273636f72d746234bc88c939e4"} Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.496634 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" event={"ID":"71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2","Type":"ContainerStarted","Data":"d399b8f8a1103700174479eb18eca58459cc96f891019a2fb8c05a4d42d5341d"} Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.499323 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" event={"ID":"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2","Type":"ContainerStarted","Data":"d73f51f2bfc21958963cbd4bd41d0d72350ef5cac03b0b891b42ebbcc75a7399"} Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.500007 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.501645 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd","Type":"ContainerStarted","Data":"0127d815e1b4411476f87ac6c09fd13ed369b5ffa64e9597645717208c7168d9"} Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.501838 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" containerName="cinder-api-log" containerID="cri-o://0a2472f31a8e5c8d5dc9b0437835174fe608db8fe2f9b004645aea35a3c5ba71" gracePeriod=30 Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.502355 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.502411 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" containerName="cinder-api" containerID="cri-o://0127d815e1b4411476f87ac6c09fd13ed369b5ffa64e9597645717208c7168d9" gracePeriod=30 Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.515899 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-7qkz8" event={"ID":"4277efcb-7f69-4cb2-9999-09d884c5b706","Type":"ContainerStarted","Data":"b53922ed5de7d8d9049a3399f7197909ea2442d4593399fa4b1e9dea556ab77c"} Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.521235 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5d869cc959-j4wsw" event={"ID":"47433338-b9cd-4b5d-beaf-e551ca335c0e","Type":"ContainerStarted","Data":"bdf1273a92d3b523af8178efd76772dea79b4cdcbed6060710b97ba64bbe7f59"} Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.527995 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef","Type":"ContainerStarted","Data":"16ad3a956916e657bfee872d1aed21a712fc056e6162e6780f2a5174e324c128"} Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.528226 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerName="ceilometer-central-agent" containerID="cri-o://96aa62470cc13084bd513c6d04189c795a7f8a0a359dcfc4789f76c936147981" gracePeriod=30 Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.528317 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.528359 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerName="proxy-httpd" containerID="cri-o://16ad3a956916e657bfee872d1aed21a712fc056e6162e6780f2a5174e324c128" gracePeriod=30 Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.528403 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerName="sg-core" containerID="cri-o://e1861f0fbc05f59f814d8127d7687682c33d543b0097b40e1ff4b016a9589ebf" gracePeriod=30 Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.528443 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerName="ceilometer-notification-agent" containerID="cri-o://4178b6c53a12c8d92b1608ca5b654aebdc2ff59365faeaea745c2f1cc3e861fb" gracePeriod=30 Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 
15:25:20.542941 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-d6b8cfb46-xzwxx" event={"ID":"6a807e28-4c6a-435c-b640-a11ae6770632","Type":"ContainerStarted","Data":"47194b046032d0ad99f33e633cbb7fa958de8a77b22a2952b50d9c85d35185cb"} Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.544605 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.544636 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.879804 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=13.39629712 podStartE2EDuration="17.879672299s" podCreationTimestamp="2025-12-04 15:25:03 +0000 UTC" firstStartedPulling="2025-12-04 15:25:05.124416356 +0000 UTC m=+1356.010459997" lastFinishedPulling="2025-12-04 15:25:09.607791535 +0000 UTC m=+1360.493835176" observedRunningTime="2025-12-04 15:25:20.832316303 +0000 UTC m=+1371.718359944" watchObservedRunningTime="2025-12-04 15:25:20.879672299 +0000 UTC m=+1371.765715940" Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.973351 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-5dbfff5fc8-dg589" podStartSLOduration=13.665117167 podStartE2EDuration="19.973335552s" podCreationTimestamp="2025-12-04 15:25:01 +0000 UTC" firstStartedPulling="2025-12-04 15:25:03.0630994 +0000 UTC m=+1353.949143051" lastFinishedPulling="2025-12-04 15:25:09.371317795 +0000 UTC m=+1360.257361436" observedRunningTime="2025-12-04 15:25:20.867495501 +0000 UTC m=+1371.753539142" watchObservedRunningTime="2025-12-04 15:25:20.973335552 +0000 UTC m=+1371.859379193" Dec 04 15:25:20 crc kubenswrapper[4946]: I1204 15:25:20.984023 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-d6b8cfb46-xzwxx" podStartSLOduration=12.983981199 podStartE2EDuration="12.983981199s" podCreationTimestamp="2025-12-04 15:25:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:25:20.897027336 +0000 UTC m=+1371.783070977" watchObservedRunningTime="2025-12-04 15:25:20.983981199 +0000 UTC m=+1371.870024850" Dec 04 15:25:21 crc kubenswrapper[4946]: I1204 15:25:21.010625 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5d869cc959-j4wsw" podStartSLOduration=13.775413567 podStartE2EDuration="20.010600026s" podCreationTimestamp="2025-12-04 15:25:01 +0000 UTC" firstStartedPulling="2025-12-04 15:25:03.066427489 +0000 UTC m=+1353.952471130" lastFinishedPulling="2025-12-04 15:25:09.301613948 +0000 UTC m=+1360.187657589" observedRunningTime="2025-12-04 15:25:20.995588961 +0000 UTC m=+1371.881632622" watchObservedRunningTime="2025-12-04 15:25:21.010600026 +0000 UTC m=+1371.896643667" Dec 04 15:25:21 crc kubenswrapper[4946]: I1204 15:25:21.048460 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=17.047330945 podStartE2EDuration="17.047330945s" podCreationTimestamp="2025-12-04 15:25:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:25:21.042525236 +0000 UTC m=+1371.928568877" 
watchObservedRunningTime="2025-12-04 15:25:21.047330945 +0000 UTC m=+1371.933374586" Dec 04 15:25:21 crc kubenswrapper[4946]: I1204 15:25:21.088965 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-storageinit-7qkz8" podStartSLOduration=10.088934666 podStartE2EDuration="10.088934666s" podCreationTimestamp="2025-12-04 15:25:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:25:21.078833554 +0000 UTC m=+1371.964877195" watchObservedRunningTime="2025-12-04 15:25:21.088934666 +0000 UTC m=+1371.974978307" Dec 04 15:25:21 crc kubenswrapper[4946]: I1204 15:25:21.143957 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" podStartSLOduration=18.143932217 podStartE2EDuration="18.143932217s" podCreationTimestamp="2025-12-04 15:25:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:25:21.132606012 +0000 UTC m=+1372.018649653" watchObservedRunningTime="2025-12-04 15:25:21.143932217 +0000 UTC m=+1372.029975858" Dec 04 15:25:21 crc kubenswrapper[4946]: I1204 15:25:21.199974 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=5.461710519 podStartE2EDuration="1m27.199949786s" podCreationTimestamp="2025-12-04 15:23:54 +0000 UTC" firstStartedPulling="2025-12-04 15:23:57.079704484 +0000 UTC m=+1287.965748125" lastFinishedPulling="2025-12-04 15:25:18.817943751 +0000 UTC m=+1369.703987392" observedRunningTime="2025-12-04 15:25:21.188547439 +0000 UTC m=+1372.074591090" watchObservedRunningTime="2025-12-04 15:25:21.199949786 +0000 UTC m=+1372.085993427" Dec 04 15:25:21 crc kubenswrapper[4946]: I1204 15:25:21.603602 4946 generic.go:334] "Generic (PLEG): container finished" podID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerID="e1861f0fbc05f59f814d8127d7687682c33d543b0097b40e1ff4b016a9589ebf" exitCode=2 Dec 04 15:25:21 crc kubenswrapper[4946]: I1204 15:25:21.604021 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef","Type":"ContainerDied","Data":"e1861f0fbc05f59f814d8127d7687682c33d543b0097b40e1ff4b016a9589ebf"} Dec 04 15:25:21 crc kubenswrapper[4946]: I1204 15:25:21.618882 4946 generic.go:334] "Generic (PLEG): container finished" podID="9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" containerID="0a2472f31a8e5c8d5dc9b0437835174fe608db8fe2f9b004645aea35a3c5ba71" exitCode=143 Dec 04 15:25:21 crc kubenswrapper[4946]: I1204 15:25:21.619370 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd","Type":"ContainerDied","Data":"0a2472f31a8e5c8d5dc9b0437835174fe608db8fe2f9b004645aea35a3c5ba71"} Dec 04 15:25:22 crc kubenswrapper[4946]: E1204 15:25:22.451901 4946 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ff96dc8_21f8_4417_9d8e_8ee12f04a3ef.slice/crio-conmon-16ad3a956916e657bfee872d1aed21a712fc056e6162e6780f2a5174e324c128.scope\": RecentStats: unable to find data in memory cache]" Dec 04 15:25:22 crc kubenswrapper[4946]: I1204 15:25:22.488529 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:25:22 crc kubenswrapper[4946]: I1204 15:25:22.488603 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:25:22 crc kubenswrapper[4946]: I1204 15:25:22.633155 4946 generic.go:334] "Generic (PLEG): container finished" podID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerID="16ad3a956916e657bfee872d1aed21a712fc056e6162e6780f2a5174e324c128" exitCode=0 Dec 04 15:25:22 crc kubenswrapper[4946]: I1204 15:25:22.633199 4946 generic.go:334] "Generic (PLEG): container finished" podID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerID="96aa62470cc13084bd513c6d04189c795a7f8a0a359dcfc4789f76c936147981" exitCode=0 Dec 04 15:25:22 crc kubenswrapper[4946]: I1204 15:25:22.633235 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef","Type":"ContainerDied","Data":"16ad3a956916e657bfee872d1aed21a712fc056e6162e6780f2a5174e324c128"} Dec 04 15:25:22 crc kubenswrapper[4946]: I1204 15:25:22.633295 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef","Type":"ContainerDied","Data":"96aa62470cc13084bd513c6d04189c795a7f8a0a359dcfc4789f76c936147981"} Dec 04 15:25:22 crc kubenswrapper[4946]: I1204 15:25:22.633303 4946 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 15:25:23 crc kubenswrapper[4946]: I1204 15:25:23.305841 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:24 crc kubenswrapper[4946]: I1204 15:25:24.304909 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 04 15:25:24 crc kubenswrapper[4946]: I1204 15:25:24.595303 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:24 crc kubenswrapper[4946]: I1204 15:25:24.662330 4946 generic.go:334] "Generic (PLEG): container finished" podID="4277efcb-7f69-4cb2-9999-09d884c5b706" containerID="b53922ed5de7d8d9049a3399f7197909ea2442d4593399fa4b1e9dea556ab77c" exitCode=0 Dec 04 15:25:24 crc kubenswrapper[4946]: I1204 15:25:24.662387 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-7qkz8" event={"ID":"4277efcb-7f69-4cb2-9999-09d884c5b706","Type":"ContainerDied","Data":"b53922ed5de7d8d9049a3399f7197909ea2442d4593399fa4b1e9dea556ab77c"} Dec 04 15:25:24 crc kubenswrapper[4946]: I1204 15:25:24.708561 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-vpcnk"] Dec 04 15:25:24 crc kubenswrapper[4946]: I1204 15:25:24.708864 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" podUID="9b3cb211-e6b7-4ab2-9978-bb62cf1ff144" containerName="dnsmasq-dns" containerID="cri-o://1fc0ce0643aa30266903bd85829348add81dd6adf9ffd70a9a2eafa9d1a4977e" gracePeriod=10 Dec 04 15:25:24 crc kubenswrapper[4946]: I1204 15:25:24.909878 4946 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 04 15:25:24 crc kubenswrapper[4946]: I1204 15:25:24.993331 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.430143 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.595706 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-dns-svc\") pod \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.596454 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-dns-swift-storage-0\") pod \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.596506 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-config\") pod \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.596571 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvpmd\" (UniqueName: \"kubernetes.io/projected/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-kube-api-access-kvpmd\") pod \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.596804 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-ovsdbserver-sb\") pod \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.597016 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-ovsdbserver-nb\") pod \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\" (UID: \"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144\") " Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.617536 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-kube-api-access-kvpmd" (OuterVolumeSpecName: "kube-api-access-kvpmd") pod "9b3cb211-e6b7-4ab2-9978-bb62cf1ff144" (UID: "9b3cb211-e6b7-4ab2-9978-bb62cf1ff144"). InnerVolumeSpecName "kube-api-access-kvpmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.677066 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9b3cb211-e6b7-4ab2-9978-bb62cf1ff144" (UID: "9b3cb211-e6b7-4ab2-9978-bb62cf1ff144"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.682772 4946 generic.go:334] "Generic (PLEG): container finished" podID="9b3cb211-e6b7-4ab2-9978-bb62cf1ff144" containerID="1fc0ce0643aa30266903bd85829348add81dd6adf9ffd70a9a2eafa9d1a4977e" exitCode=0 Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.682916 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" event={"ID":"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144","Type":"ContainerDied","Data":"1fc0ce0643aa30266903bd85829348add81dd6adf9ffd70a9a2eafa9d1a4977e"} Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.682999 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" event={"ID":"9b3cb211-e6b7-4ab2-9978-bb62cf1ff144","Type":"ContainerDied","Data":"83e337d1cf8e55c96dc52bbea8a2d75527c5e4bc9bd81ea0358ff233f56f4a34"} Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.683028 4946 scope.go:117] "RemoveContainer" containerID="1fc0ce0643aa30266903bd85829348add81dd6adf9ffd70a9a2eafa9d1a4977e" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.683130 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="9a2770ac-74ec-4a84-9f54-45602f47dd3a" containerName="cinder-scheduler" containerID="cri-o://fb52bd5d1ea1de828858d205cb57cefe2050c99e6f47135ad23e3e1e41ef6787" gracePeriod=30 Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.683668 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="9a2770ac-74ec-4a84-9f54-45602f47dd3a" containerName="probe" containerID="cri-o://524ed05af88c7256deb0ddf998a5a9b4e0a80e273636f72d746234bc88c939e4" gracePeriod=30 Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.686482 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-vpcnk" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.696363 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9b3cb211-e6b7-4ab2-9978-bb62cf1ff144" (UID: "9b3cb211-e6b7-4ab2-9978-bb62cf1ff144"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.699887 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.699916 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.699928 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvpmd\" (UniqueName: \"kubernetes.io/projected/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-kube-api-access-kvpmd\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.705014 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-config" (OuterVolumeSpecName: "config") pod "9b3cb211-e6b7-4ab2-9978-bb62cf1ff144" (UID: "9b3cb211-e6b7-4ab2-9978-bb62cf1ff144"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.710381 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9b3cb211-e6b7-4ab2-9978-bb62cf1ff144" (UID: "9b3cb211-e6b7-4ab2-9978-bb62cf1ff144"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.731105 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9b3cb211-e6b7-4ab2-9978-bb62cf1ff144" (UID: "9b3cb211-e6b7-4ab2-9978-bb62cf1ff144"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.798633 4946 scope.go:117] "RemoveContainer" containerID="e18351f352a6d42119d3043d041b04ed4bbbd19de51a58becd7d4dd001a3ba83" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.801582 4946 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.801618 4946 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.801647 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.831163 4946 scope.go:117] "RemoveContainer" containerID="1fc0ce0643aa30266903bd85829348add81dd6adf9ffd70a9a2eafa9d1a4977e" Dec 04 15:25:25 crc kubenswrapper[4946]: E1204 15:25:25.832047 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fc0ce0643aa30266903bd85829348add81dd6adf9ffd70a9a2eafa9d1a4977e\": container with ID starting with 1fc0ce0643aa30266903bd85829348add81dd6adf9ffd70a9a2eafa9d1a4977e not found: ID does not exist" containerID="1fc0ce0643aa30266903bd85829348add81dd6adf9ffd70a9a2eafa9d1a4977e" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.832078 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fc0ce0643aa30266903bd85829348add81dd6adf9ffd70a9a2eafa9d1a4977e"} err="failed to get container status \"1fc0ce0643aa30266903bd85829348add81dd6adf9ffd70a9a2eafa9d1a4977e\": rpc error: code = NotFound desc = could not find container \"1fc0ce0643aa30266903bd85829348add81dd6adf9ffd70a9a2eafa9d1a4977e\": container with ID starting with 1fc0ce0643aa30266903bd85829348add81dd6adf9ffd70a9a2eafa9d1a4977e not found: ID does not exist" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.832104 4946 scope.go:117] "RemoveContainer" containerID="e18351f352a6d42119d3043d041b04ed4bbbd19de51a58becd7d4dd001a3ba83" Dec 04 15:25:25 crc kubenswrapper[4946]: E1204 15:25:25.832407 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e18351f352a6d42119d3043d041b04ed4bbbd19de51a58becd7d4dd001a3ba83\": container with ID starting with e18351f352a6d42119d3043d041b04ed4bbbd19de51a58becd7d4dd001a3ba83 not found: ID does not exist" containerID="e18351f352a6d42119d3043d041b04ed4bbbd19de51a58becd7d4dd001a3ba83" Dec 04 15:25:25 crc kubenswrapper[4946]: I1204 15:25:25.832425 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e18351f352a6d42119d3043d041b04ed4bbbd19de51a58becd7d4dd001a3ba83"} err="failed to get container status \"e18351f352a6d42119d3043d041b04ed4bbbd19de51a58becd7d4dd001a3ba83\": rpc error: code = NotFound desc = could not find container \"e18351f352a6d42119d3043d041b04ed4bbbd19de51a58becd7d4dd001a3ba83\": container with ID starting with e18351f352a6d42119d3043d041b04ed4bbbd19de51a58becd7d4dd001a3ba83 not found: ID does not exist" Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 
15:25:26.122281 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.151185 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-vpcnk"] Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.166183 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-vpcnk"] Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.209720 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4277efcb-7f69-4cb2-9999-09d884c5b706-config-data\") pod \"4277efcb-7f69-4cb2-9999-09d884c5b706\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.209866 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n98s7\" (UniqueName: \"kubernetes.io/projected/4277efcb-7f69-4cb2-9999-09d884c5b706-kube-api-access-n98s7\") pod \"4277efcb-7f69-4cb2-9999-09d884c5b706\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.210024 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4277efcb-7f69-4cb2-9999-09d884c5b706-certs\") pod \"4277efcb-7f69-4cb2-9999-09d884c5b706\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.210247 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4277efcb-7f69-4cb2-9999-09d884c5b706-scripts\") pod \"4277efcb-7f69-4cb2-9999-09d884c5b706\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.210290 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4277efcb-7f69-4cb2-9999-09d884c5b706-combined-ca-bundle\") pod \"4277efcb-7f69-4cb2-9999-09d884c5b706\" (UID: \"4277efcb-7f69-4cb2-9999-09d884c5b706\") " Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.217912 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4277efcb-7f69-4cb2-9999-09d884c5b706-scripts" (OuterVolumeSpecName: "scripts") pod "4277efcb-7f69-4cb2-9999-09d884c5b706" (UID: "4277efcb-7f69-4cb2-9999-09d884c5b706"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.218733 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4277efcb-7f69-4cb2-9999-09d884c5b706-kube-api-access-n98s7" (OuterVolumeSpecName: "kube-api-access-n98s7") pod "4277efcb-7f69-4cb2-9999-09d884c5b706" (UID: "4277efcb-7f69-4cb2-9999-09d884c5b706"). InnerVolumeSpecName "kube-api-access-n98s7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.240989 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4277efcb-7f69-4cb2-9999-09d884c5b706-certs" (OuterVolumeSpecName: "certs") pod "4277efcb-7f69-4cb2-9999-09d884c5b706" (UID: "4277efcb-7f69-4cb2-9999-09d884c5b706"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.268828 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4277efcb-7f69-4cb2-9999-09d884c5b706-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4277efcb-7f69-4cb2-9999-09d884c5b706" (UID: "4277efcb-7f69-4cb2-9999-09d884c5b706"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.293750 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4277efcb-7f69-4cb2-9999-09d884c5b706-config-data" (OuterVolumeSpecName: "config-data") pod "4277efcb-7f69-4cb2-9999-09d884c5b706" (UID: "4277efcb-7f69-4cb2-9999-09d884c5b706"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.313604 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n98s7\" (UniqueName: \"kubernetes.io/projected/4277efcb-7f69-4cb2-9999-09d884c5b706-kube-api-access-n98s7\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.313891 4946 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4277efcb-7f69-4cb2-9999-09d884c5b706-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.313966 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4277efcb-7f69-4cb2-9999-09d884c5b706-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.314072 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4277efcb-7f69-4cb2-9999-09d884c5b706-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.314208 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4277efcb-7f69-4cb2-9999-09d884c5b706-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.698633 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-7qkz8" event={"ID":"4277efcb-7f69-4cb2-9999-09d884c5b706","Type":"ContainerDied","Data":"6a9e76f6f834f3bf0e248b485368ba590e842d744d5fc97c0d7aa00ecf1353c6"} Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.698688 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a9e76f6f834f3bf0e248b485368ba590e842d744d5fc97c0d7aa00ecf1353c6" Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.698715 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-7qkz8" Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.702837 4946 generic.go:334] "Generic (PLEG): container finished" podID="9a2770ac-74ec-4a84-9f54-45602f47dd3a" containerID="524ed05af88c7256deb0ddf998a5a9b4e0a80e273636f72d746234bc88c939e4" exitCode=0 Dec 04 15:25:26 crc kubenswrapper[4946]: I1204 15:25:26.702906 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9a2770ac-74ec-4a84-9f54-45602f47dd3a","Type":"ContainerDied","Data":"524ed05af88c7256deb0ddf998a5a9b4e0a80e273636f72d746234bc88c939e4"} Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.182347 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-qrznx"] Dec 04 15:25:27 crc kubenswrapper[4946]: E1204 15:25:27.184688 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b3cb211-e6b7-4ab2-9978-bb62cf1ff144" containerName="dnsmasq-dns" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.184718 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b3cb211-e6b7-4ab2-9978-bb62cf1ff144" containerName="dnsmasq-dns" Dec 04 15:25:27 crc kubenswrapper[4946]: E1204 15:25:27.184736 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5b6c82d-825a-448c-a03c-53dee6f4bfc0" containerName="neutron-httpd" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.184745 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5b6c82d-825a-448c-a03c-53dee6f4bfc0" containerName="neutron-httpd" Dec 04 15:25:27 crc kubenswrapper[4946]: E1204 15:25:27.184772 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5b6c82d-825a-448c-a03c-53dee6f4bfc0" containerName="neutron-api" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.184779 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5b6c82d-825a-448c-a03c-53dee6f4bfc0" containerName="neutron-api" Dec 04 15:25:27 crc kubenswrapper[4946]: E1204 15:25:27.184828 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4277efcb-7f69-4cb2-9999-09d884c5b706" containerName="cloudkitty-storageinit" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.184838 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="4277efcb-7f69-4cb2-9999-09d884c5b706" containerName="cloudkitty-storageinit" Dec 04 15:25:27 crc kubenswrapper[4946]: E1204 15:25:27.184849 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b3cb211-e6b7-4ab2-9978-bb62cf1ff144" containerName="init" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.184856 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b3cb211-e6b7-4ab2-9978-bb62cf1ff144" containerName="init" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.185167 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5b6c82d-825a-448c-a03c-53dee6f4bfc0" containerName="neutron-api" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.185199 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="4277efcb-7f69-4cb2-9999-09d884c5b706" containerName="cloudkitty-storageinit" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.185216 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5b6c82d-825a-448c-a03c-53dee6f4bfc0" containerName="neutron-httpd" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.185233 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b3cb211-e6b7-4ab2-9978-bb62cf1ff144" containerName="dnsmasq-dns" 
Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.186920 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.217040 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.218601 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.237996 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.238167 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.238354 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-z46wn" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.242786 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.243068 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.288416 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-qrznx"] Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.316551 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.342039 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.342128 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-scripts\") pod \"cloudkitty-proc-0\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.342170 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-config-data\") pod \"cloudkitty-proc-0\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.342192 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-config\") pod \"dnsmasq-dns-67bdc55879-qrznx\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.342246 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: 
\"aa433d65-79b3-4465-86db-4d631aed9396\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.342271 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-dns-swift-storage-0\") pod \"dnsmasq-dns-67bdc55879-qrznx\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.342298 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/aa433d65-79b3-4465-86db-4d631aed9396-certs\") pod \"cloudkitty-proc-0\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.342318 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-ovsdbserver-sb\") pod \"dnsmasq-dns-67bdc55879-qrznx\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.342361 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jx4pt\" (UniqueName: \"kubernetes.io/projected/aa433d65-79b3-4465-86db-4d631aed9396-kube-api-access-jx4pt\") pod \"cloudkitty-proc-0\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.342416 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-ovsdbserver-nb\") pod \"dnsmasq-dns-67bdc55879-qrznx\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.342462 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-dns-svc\") pod \"dnsmasq-dns-67bdc55879-qrznx\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.342497 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p267c\" (UniqueName: \"kubernetes.io/projected/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-kube-api-access-p267c\") pod \"dnsmasq-dns-67bdc55879-qrznx\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.446832 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jx4pt\" (UniqueName: \"kubernetes.io/projected/aa433d65-79b3-4465-86db-4d631aed9396-kube-api-access-jx4pt\") pod \"cloudkitty-proc-0\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.446950 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-ovsdbserver-nb\") pod 
\"dnsmasq-dns-67bdc55879-qrznx\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.447026 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-dns-svc\") pod \"dnsmasq-dns-67bdc55879-qrznx\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.447073 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p267c\" (UniqueName: \"kubernetes.io/projected/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-kube-api-access-p267c\") pod \"dnsmasq-dns-67bdc55879-qrznx\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.447140 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.447185 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-scripts\") pod \"cloudkitty-proc-0\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.447207 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-config-data\") pod \"cloudkitty-proc-0\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.447233 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-config\") pod \"dnsmasq-dns-67bdc55879-qrznx\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.447282 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.447304 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-dns-swift-storage-0\") pod \"dnsmasq-dns-67bdc55879-qrznx\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.447328 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/aa433d65-79b3-4465-86db-4d631aed9396-certs\") pod \"cloudkitty-proc-0\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.447357 4946 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-ovsdbserver-sb\") pod \"dnsmasq-dns-67bdc55879-qrznx\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.448586 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-ovsdbserver-sb\") pod \"dnsmasq-dns-67bdc55879-qrznx\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.450049 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-dns-svc\") pod \"dnsmasq-dns-67bdc55879-qrznx\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.450600 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-ovsdbserver-nb\") pod \"dnsmasq-dns-67bdc55879-qrznx\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.459531 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-scripts\") pod \"cloudkitty-proc-0\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.463662 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-dns-swift-storage-0\") pod \"dnsmasq-dns-67bdc55879-qrznx\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.466176 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-config\") pod \"dnsmasq-dns-67bdc55879-qrznx\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.473374 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-config-data\") pod \"cloudkitty-proc-0\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.480003 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.488020 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/aa433d65-79b3-4465-86db-4d631aed9396-certs\") pod \"cloudkitty-proc-0\" (UID: 
\"aa433d65-79b3-4465-86db-4d631aed9396\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.503447 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.505225 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b3cb211-e6b7-4ab2-9978-bb62cf1ff144" path="/var/lib/kubelet/pods/9b3cb211-e6b7-4ab2-9978-bb62cf1ff144/volumes" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.525416 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p267c\" (UniqueName: \"kubernetes.io/projected/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-kube-api-access-p267c\") pod \"dnsmasq-dns-67bdc55879-qrznx\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.552866 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jx4pt\" (UniqueName: \"kubernetes.io/projected/aa433d65-79b3-4465-86db-4d631aed9396-kube-api-access-jx4pt\") pod \"cloudkitty-proc-0\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.568855 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.642251 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"] Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.645176 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.647681 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.656704 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.783778 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-scripts\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.783836 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/717f061d-c408-470b-982d-059f1cd4c93e-logs\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.783867 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.783913 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dz2fg\" (UniqueName: \"kubernetes.io/projected/717f061d-c408-470b-982d-059f1cd4c93e-kube-api-access-dz2fg\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.783949 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/717f061d-c408-470b-982d-059f1cd4c93e-certs\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.784004 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.784027 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-config-data\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.822946 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.885776 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.885852 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-config-data\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.885986 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-scripts\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.886010 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/717f061d-c408-470b-982d-059f1cd4c93e-logs\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.886035 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.886083 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dz2fg\" (UniqueName: \"kubernetes.io/projected/717f061d-c408-470b-982d-059f1cd4c93e-kube-api-access-dz2fg\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.886133 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/717f061d-c408-470b-982d-059f1cd4c93e-certs\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.889146 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/717f061d-c408-470b-982d-059f1cd4c93e-logs\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.892741 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-scripts\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.895713 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: 
\"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.895857 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.896330 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/717f061d-c408-470b-982d-059f1cd4c93e-certs\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.897365 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-config-data\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.913983 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dz2fg\" (UniqueName: \"kubernetes.io/projected/717f061d-c408-470b-982d-059f1cd4c93e-kube-api-access-dz2fg\") pod \"cloudkitty-api-0\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:27 crc kubenswrapper[4946]: I1204 15:25:27.991656 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 04 15:25:28 crc kubenswrapper[4946]: I1204 15:25:28.479434 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-qrznx"] Dec 04 15:25:28 crc kubenswrapper[4946]: I1204 15:25:28.643961 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 04 15:25:28 crc kubenswrapper[4946]: I1204 15:25:28.779556 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-qrznx" event={"ID":"53c3c8fa-f2df-43db-aa5b-cbee4b29d487","Type":"ContainerStarted","Data":"80435d55841e2ee7f94f7c40376830feb7db0936044e3453df9c7bf9b1b98278"} Dec 04 15:25:28 crc kubenswrapper[4946]: I1204 15:25:28.782051 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"aa433d65-79b3-4465-86db-4d631aed9396","Type":"ContainerStarted","Data":"d59ec63d3f5b6443fe9adcb3b888f0f97cbd247a310ec7c915de7eb8d67ddf8a"} Dec 04 15:25:28 crc kubenswrapper[4946]: I1204 15:25:28.820879 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 04 15:25:28 crc kubenswrapper[4946]: W1204 15:25:28.822255 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod717f061d_c408_470b_982d_059f1cd4c93e.slice/crio-24926806ce6999ca3721fe0ed76540a9104bcdbf0ae3fe7c2f8dde702ad3a02c WatchSource:0}: Error finding container 24926806ce6999ca3721fe0ed76540a9104bcdbf0ae3fe7c2f8dde702ad3a02c: Status 404 returned error can't find the container with id 24926806ce6999ca3721fe0ed76540a9104bcdbf0ae3fe7c2f8dde702ad3a02c Dec 04 15:25:28 crc kubenswrapper[4946]: I1204 15:25:28.823230 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 04 15:25:29 crc kubenswrapper[4946]: I1204 15:25:29.841569 4946 generic.go:334] 
"Generic (PLEG): container finished" podID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerID="4178b6c53a12c8d92b1608ca5b654aebdc2ff59365faeaea745c2f1cc3e861fb" exitCode=0 Dec 04 15:25:29 crc kubenswrapper[4946]: I1204 15:25:29.842600 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef","Type":"ContainerDied","Data":"4178b6c53a12c8d92b1608ca5b654aebdc2ff59365faeaea745c2f1cc3e861fb"} Dec 04 15:25:29 crc kubenswrapper[4946]: I1204 15:25:29.859421 4946 generic.go:334] "Generic (PLEG): container finished" podID="9a2770ac-74ec-4a84-9f54-45602f47dd3a" containerID="fb52bd5d1ea1de828858d205cb57cefe2050c99e6f47135ad23e3e1e41ef6787" exitCode=0 Dec 04 15:25:29 crc kubenswrapper[4946]: I1204 15:25:29.859512 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9a2770ac-74ec-4a84-9f54-45602f47dd3a","Type":"ContainerDied","Data":"fb52bd5d1ea1de828858d205cb57cefe2050c99e6f47135ad23e3e1e41ef6787"} Dec 04 15:25:29 crc kubenswrapper[4946]: I1204 15:25:29.917976 4946 generic.go:334] "Generic (PLEG): container finished" podID="53c3c8fa-f2df-43db-aa5b-cbee4b29d487" containerID="e6d6fd2823510114a11b4a940235e29694d57abdf7a92da214138f2fde5a4529" exitCode=0 Dec 04 15:25:29 crc kubenswrapper[4946]: I1204 15:25:29.918101 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-qrznx" event={"ID":"53c3c8fa-f2df-43db-aa5b-cbee4b29d487","Type":"ContainerDied","Data":"e6d6fd2823510114a11b4a940235e29694d57abdf7a92da214138f2fde5a4529"} Dec 04 15:25:29 crc kubenswrapper[4946]: I1204 15:25:29.953483 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"717f061d-c408-470b-982d-059f1cd4c93e","Type":"ContainerStarted","Data":"b682c638c90256015b275bada9fe1818f3c55c135acb03afea9929bd1209c7d4"} Dec 04 15:25:29 crc kubenswrapper[4946]: I1204 15:25:29.953545 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"717f061d-c408-470b-982d-059f1cd4c93e","Type":"ContainerStarted","Data":"24926806ce6999ca3721fe0ed76540a9104bcdbf0ae3fe7c2f8dde702ad3a02c"} Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.319548 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.435752 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-scripts\") pod \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.435797 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-log-httpd\") pod \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.435859 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-combined-ca-bundle\") pod \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.435911 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-config-data\") pod \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.435964 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-run-httpd\") pod \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.436095 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-sg-core-conf-yaml\") pod \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.436149 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5mkk\" (UniqueName: \"kubernetes.io/projected/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-kube-api-access-c5mkk\") pod \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\" (UID: \"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef\") " Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.449487 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" (UID: "8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.450817 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" (UID: "8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.461048 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-kube-api-access-c5mkk" (OuterVolumeSpecName: "kube-api-access-c5mkk") pod "8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" (UID: "8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef"). InnerVolumeSpecName "kube-api-access-c5mkk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.471044 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-scripts" (OuterVolumeSpecName: "scripts") pod "8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" (UID: "8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.553912 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.553943 4946 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.553956 4946 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.553966 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5mkk\" (UniqueName: \"kubernetes.io/projected/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-kube-api-access-c5mkk\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.611354 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.631563 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" (UID: "8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.655925 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-combined-ca-bundle\") pod \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.656001 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9a2770ac-74ec-4a84-9f54-45602f47dd3a-etc-machine-id\") pod \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.656146 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-scripts\") pod \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.656185 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-config-data\") pod \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.656212 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-config-data-custom\") pod \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.656262 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xrvbc\" (UniqueName: \"kubernetes.io/projected/9a2770ac-74ec-4a84-9f54-45602f47dd3a-kube-api-access-xrvbc\") pod \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\" (UID: \"9a2770ac-74ec-4a84-9f54-45602f47dd3a\") " Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.656567 4946 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.663254 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9a2770ac-74ec-4a84-9f54-45602f47dd3a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9a2770ac-74ec-4a84-9f54-45602f47dd3a" (UID: "9a2770ac-74ec-4a84-9f54-45602f47dd3a"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.684878 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-scripts" (OuterVolumeSpecName: "scripts") pod "9a2770ac-74ec-4a84-9f54-45602f47dd3a" (UID: "9a2770ac-74ec-4a84-9f54-45602f47dd3a"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.684999 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9a2770ac-74ec-4a84-9f54-45602f47dd3a" (UID: "9a2770ac-74ec-4a84-9f54-45602f47dd3a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.695479 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a2770ac-74ec-4a84-9f54-45602f47dd3a-kube-api-access-xrvbc" (OuterVolumeSpecName: "kube-api-access-xrvbc") pod "9a2770ac-74ec-4a84-9f54-45602f47dd3a" (UID: "9a2770ac-74ec-4a84-9f54-45602f47dd3a"). InnerVolumeSpecName "kube-api-access-xrvbc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.718146 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" (UID: "8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.759892 4946 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.759936 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xrvbc\" (UniqueName: \"kubernetes.io/projected/9a2770ac-74ec-4a84-9f54-45602f47dd3a-kube-api-access-xrvbc\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.759951 4946 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9a2770ac-74ec-4a84-9f54-45602f47dd3a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.759963 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.759974 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.851934 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.856289 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-config-data" (OuterVolumeSpecName: "config-data") pod "8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" (UID: "8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.864025 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.881518 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9a2770ac-74ec-4a84-9f54-45602f47dd3a" (UID: "9a2770ac-74ec-4a84-9f54-45602f47dd3a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:30 crc kubenswrapper[4946]: I1204 15:25:30.967874 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.000362 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-config-data" (OuterVolumeSpecName: "config-data") pod "9a2770ac-74ec-4a84-9f54-45602f47dd3a" (UID: "9a2770ac-74ec-4a84-9f54-45602f47dd3a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.010725 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-qrznx" event={"ID":"53c3c8fa-f2df-43db-aa5b-cbee4b29d487","Type":"ContainerStarted","Data":"259e523792d0edb6d87809f5b03edf5a10c0001cd075ec27cde28084cbdf0eb6"} Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.012246 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.035758 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"717f061d-c408-470b-982d-059f1cd4c93e","Type":"ContainerStarted","Data":"85d2f3eb50387172b619e3e40cf7400587256d7a960fd108d3d7ce590875c200"} Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.036035 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.041603 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67bdc55879-qrznx" podStartSLOduration=4.041583112 podStartE2EDuration="4.041583112s" podCreationTimestamp="2025-12-04 15:25:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:25:31.035762136 +0000 UTC m=+1381.921805777" watchObservedRunningTime="2025-12-04 15:25:31.041583112 +0000 UTC m=+1381.927626753" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.063975 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef","Type":"ContainerDied","Data":"58e5b7eb191568a992818308ec7b640b35f355935848eeda9a7b53c21c9b1069"} Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.064038 4946 scope.go:117] "RemoveContainer" containerID="16ad3a956916e657bfee872d1aed21a712fc056e6162e6780f2a5174e324c128" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.064287 4946 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.071247 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a2770ac-74ec-4a84-9f54-45602f47dd3a-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.073047 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=4.073021809 podStartE2EDuration="4.073021809s" podCreationTimestamp="2025-12-04 15:25:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:25:31.067948533 +0000 UTC m=+1381.953992174" watchObservedRunningTime="2025-12-04 15:25:31.073021809 +0000 UTC m=+1381.959065440" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.097937 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9a2770ac-74ec-4a84-9f54-45602f47dd3a","Type":"ContainerDied","Data":"81594ae692c95b4fc3455df607ee0b1d7f8ff881280f8da20268f7e1fe4e415c"} Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.098148 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.137838 4946 scope.go:117] "RemoveContainer" containerID="e1861f0fbc05f59f814d8127d7687682c33d543b0097b40e1ff4b016a9589ebf" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.228156 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.237391 4946 scope.go:117] "RemoveContainer" containerID="4178b6c53a12c8d92b1608ca5b654aebdc2ff59365faeaea745c2f1cc3e861fb" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.256187 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.292571 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.323816 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.373660 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:25:31 crc kubenswrapper[4946]: E1204 15:25:31.374319 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a2770ac-74ec-4a84-9f54-45602f47dd3a" containerName="cinder-scheduler" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.374340 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a2770ac-74ec-4a84-9f54-45602f47dd3a" containerName="cinder-scheduler" Dec 04 15:25:31 crc kubenswrapper[4946]: E1204 15:25:31.374356 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a2770ac-74ec-4a84-9f54-45602f47dd3a" containerName="probe" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.374365 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a2770ac-74ec-4a84-9f54-45602f47dd3a" containerName="probe" Dec 04 15:25:31 crc kubenswrapper[4946]: E1204 15:25:31.374378 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerName="proxy-httpd" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.374384 4946 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerName="proxy-httpd" Dec 04 15:25:31 crc kubenswrapper[4946]: E1204 15:25:31.374403 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerName="sg-core" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.374409 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerName="sg-core" Dec 04 15:25:31 crc kubenswrapper[4946]: E1204 15:25:31.374440 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerName="ceilometer-notification-agent" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.374446 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerName="ceilometer-notification-agent" Dec 04 15:25:31 crc kubenswrapper[4946]: E1204 15:25:31.374457 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerName="ceilometer-central-agent" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.374464 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerName="ceilometer-central-agent" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.374695 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a2770ac-74ec-4a84-9f54-45602f47dd3a" containerName="cinder-scheduler" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.374705 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerName="sg-core" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.374721 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerName="ceilometer-notification-agent" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.374737 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerName="proxy-httpd" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.374747 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" containerName="ceilometer-central-agent" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.374775 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a2770ac-74ec-4a84-9f54-45602f47dd3a" containerName="probe" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.381787 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.391269 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.391916 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.403942 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.407629 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.407679 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.407790 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdxkw\" (UniqueName: \"kubernetes.io/projected/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-kube-api-access-fdxkw\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.407820 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-scripts\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.407838 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-log-httpd\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.407925 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-run-httpd\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.407982 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-config-data\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.421309 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.427853 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.449773 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.474899 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef" path="/var/lib/kubelet/pods/8ff96dc8-21f8-4417-9d8e-8ee12f04a3ef/volumes" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.476080 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a2770ac-74ec-4a84-9f54-45602f47dd3a" path="/var/lib/kubelet/pods/9a2770ac-74ec-4a84-9f54-45602f47dd3a/volumes" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.476804 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.510298 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpkjh\" (UniqueName: \"kubernetes.io/projected/fb133f81-1fe2-4e36-8663-8301e9373627-kube-api-access-kpkjh\") pod \"cinder-scheduler-0\" (UID: \"fb133f81-1fe2-4e36-8663-8301e9373627\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.510348 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb133f81-1fe2-4e36-8663-8301e9373627-config-data\") pod \"cinder-scheduler-0\" (UID: \"fb133f81-1fe2-4e36-8663-8301e9373627\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.510382 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-config-data\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.510413 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fb133f81-1fe2-4e36-8663-8301e9373627-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"fb133f81-1fe2-4e36-8663-8301e9373627\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.510442 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.510468 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.510490 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb133f81-1fe2-4e36-8663-8301e9373627-config-data-custom\") 
pod \"cinder-scheduler-0\" (UID: \"fb133f81-1fe2-4e36-8663-8301e9373627\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.510524 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb133f81-1fe2-4e36-8663-8301e9373627-scripts\") pod \"cinder-scheduler-0\" (UID: \"fb133f81-1fe2-4e36-8663-8301e9373627\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.510566 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb133f81-1fe2-4e36-8663-8301e9373627-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"fb133f81-1fe2-4e36-8663-8301e9373627\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.510591 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdxkw\" (UniqueName: \"kubernetes.io/projected/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-kube-api-access-fdxkw\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.510618 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-scripts\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.510637 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-log-httpd\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.510708 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-run-httpd\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.511208 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-run-httpd\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.512623 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-log-httpd\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.519838 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.521400 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-scripts\") pod 
\"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.523894 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.528945 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-config-data\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.534489 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdxkw\" (UniqueName: \"kubernetes.io/projected/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-kube-api-access-fdxkw\") pod \"ceilometer-0\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.614997 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpkjh\" (UniqueName: \"kubernetes.io/projected/fb133f81-1fe2-4e36-8663-8301e9373627-kube-api-access-kpkjh\") pod \"cinder-scheduler-0\" (UID: \"fb133f81-1fe2-4e36-8663-8301e9373627\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.615072 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb133f81-1fe2-4e36-8663-8301e9373627-config-data\") pod \"cinder-scheduler-0\" (UID: \"fb133f81-1fe2-4e36-8663-8301e9373627\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.616175 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fb133f81-1fe2-4e36-8663-8301e9373627-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"fb133f81-1fe2-4e36-8663-8301e9373627\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.616353 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fb133f81-1fe2-4e36-8663-8301e9373627-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"fb133f81-1fe2-4e36-8663-8301e9373627\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.616422 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb133f81-1fe2-4e36-8663-8301e9373627-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"fb133f81-1fe2-4e36-8663-8301e9373627\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.616478 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb133f81-1fe2-4e36-8663-8301e9373627-scripts\") pod \"cinder-scheduler-0\" (UID: \"fb133f81-1fe2-4e36-8663-8301e9373627\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.616674 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/fb133f81-1fe2-4e36-8663-8301e9373627-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"fb133f81-1fe2-4e36-8663-8301e9373627\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.623643 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb133f81-1fe2-4e36-8663-8301e9373627-scripts\") pod \"cinder-scheduler-0\" (UID: \"fb133f81-1fe2-4e36-8663-8301e9373627\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.623850 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb133f81-1fe2-4e36-8663-8301e9373627-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"fb133f81-1fe2-4e36-8663-8301e9373627\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.625908 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb133f81-1fe2-4e36-8663-8301e9373627-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"fb133f81-1fe2-4e36-8663-8301e9373627\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.622747 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb133f81-1fe2-4e36-8663-8301e9373627-config-data\") pod \"cinder-scheduler-0\" (UID: \"fb133f81-1fe2-4e36-8663-8301e9373627\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.644785 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpkjh\" (UniqueName: \"kubernetes.io/projected/fb133f81-1fe2-4e36-8663-8301e9373627-kube-api-access-kpkjh\") pod \"cinder-scheduler-0\" (UID: \"fb133f81-1fe2-4e36-8663-8301e9373627\") " pod="openstack/cinder-scheduler-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.721023 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:25:31 crc kubenswrapper[4946]: I1204 15:25:31.750333 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 04 15:25:32 crc kubenswrapper[4946]: I1204 15:25:32.122683 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="717f061d-c408-470b-982d-059f1cd4c93e" containerName="cloudkitty-api-log" containerID="cri-o://b682c638c90256015b275bada9fe1818f3c55c135acb03afea9929bd1209c7d4" gracePeriod=30 Dec 04 15:25:32 crc kubenswrapper[4946]: I1204 15:25:32.122789 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="717f061d-c408-470b-982d-059f1cd4c93e" containerName="cloudkitty-api" containerID="cri-o://85d2f3eb50387172b619e3e40cf7400587256d7a960fd108d3d7ce590875c200" gracePeriod=30 Dec 04 15:25:32 crc kubenswrapper[4946]: I1204 15:25:32.637552 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-d6b8cfb46-xzwxx" Dec 04 15:25:32 crc kubenswrapper[4946]: I1204 15:25:32.698268 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:25:32 crc kubenswrapper[4946]: I1204 15:25:32.741164 4946 scope.go:117] "RemoveContainer" containerID="96aa62470cc13084bd513c6d04189c795a7f8a0a359dcfc4789f76c936147981" Dec 04 15:25:32 crc kubenswrapper[4946]: I1204 15:25:32.850248 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5c78d69c8-m42w5"] Dec 04 15:25:32 crc kubenswrapper[4946]: I1204 15:25:32.850583 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5c78d69c8-m42w5" podUID="8e01116c-b2f6-4512-a06e-8c2bb9710fb3" containerName="barbican-api-log" containerID="cri-o://79bd221dcfa9dfd38a6d062623f7ca01093e48f041e6ec5341e9b5d2be32fded" gracePeriod=30 Dec 04 15:25:32 crc kubenswrapper[4946]: I1204 15:25:32.851276 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5c78d69c8-m42w5" podUID="8e01116c-b2f6-4512-a06e-8c2bb9710fb3" containerName="barbican-api" containerID="cri-o://c5b65fdc664524ce0d0ef7529d4feff4767b17aa21281ae9efef589cf3366a2c" gracePeriod=30 Dec 04 15:25:32 crc kubenswrapper[4946]: I1204 15:25:32.882037 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6c965d6d44-d6246" Dec 04 15:25:33 crc kubenswrapper[4946]: I1204 15:25:33.215540 4946 generic.go:334] "Generic (PLEG): container finished" podID="717f061d-c408-470b-982d-059f1cd4c93e" containerID="85d2f3eb50387172b619e3e40cf7400587256d7a960fd108d3d7ce590875c200" exitCode=0 Dec 04 15:25:33 crc kubenswrapper[4946]: I1204 15:25:33.215906 4946 generic.go:334] "Generic (PLEG): container finished" podID="717f061d-c408-470b-982d-059f1cd4c93e" containerID="b682c638c90256015b275bada9fe1818f3c55c135acb03afea9929bd1209c7d4" exitCode=143 Dec 04 15:25:33 crc kubenswrapper[4946]: I1204 15:25:33.216000 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"717f061d-c408-470b-982d-059f1cd4c93e","Type":"ContainerDied","Data":"85d2f3eb50387172b619e3e40cf7400587256d7a960fd108d3d7ce590875c200"} Dec 04 15:25:33 crc kubenswrapper[4946]: I1204 15:25:33.216060 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"717f061d-c408-470b-982d-059f1cd4c93e","Type":"ContainerDied","Data":"b682c638c90256015b275bada9fe1818f3c55c135acb03afea9929bd1209c7d4"} Dec 04 15:25:33 crc kubenswrapper[4946]: I1204 15:25:33.256350 4946 
scope.go:117] "RemoveContainer" containerID="524ed05af88c7256deb0ddf998a5a9b4e0a80e273636f72d746234bc88c939e4" Dec 04 15:25:33 crc kubenswrapper[4946]: E1204 15:25:33.273956 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96aa62470cc13084bd513c6d04189c795a7f8a0a359dcfc4789f76c936147981\": container with ID starting with 96aa62470cc13084bd513c6d04189c795a7f8a0a359dcfc4789f76c936147981 not found: ID does not exist" containerID="96aa62470cc13084bd513c6d04189c795a7f8a0a359dcfc4789f76c936147981" Dec 04 15:25:33 crc kubenswrapper[4946]: I1204 15:25:33.458587 4946 scope.go:117] "RemoveContainer" containerID="fb52bd5d1ea1de828858d205cb57cefe2050c99e6f47135ad23e3e1e41ef6787" Dec 04 15:25:33 crc kubenswrapper[4946]: I1204 15:25:33.765177 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 04 15:25:33 crc kubenswrapper[4946]: W1204 15:25:33.788099 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfb133f81_1fe2_4e36_8663_8301e9373627.slice/crio-3c40c72e18dff80747c14ceca3faf43ab03bee9ececa50b38563336c32a4acca WatchSource:0}: Error finding container 3c40c72e18dff80747c14ceca3faf43ab03bee9ececa50b38563336c32a4acca: Status 404 returned error can't find the container with id 3c40c72e18dff80747c14ceca3faf43ab03bee9ececa50b38563336c32a4acca Dec 04 15:25:33 crc kubenswrapper[4946]: I1204 15:25:33.881752 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-b7bc594d8-sjpg5" Dec 04 15:25:33 crc kubenswrapper[4946]: I1204 15:25:33.971331 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 04 15:25:33 crc kubenswrapper[4946]: I1204 15:25:33.993786 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.116362 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/717f061d-c408-470b-982d-059f1cd4c93e-certs\") pod \"717f061d-c408-470b-982d-059f1cd4c93e\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.116740 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-scripts\") pod \"717f061d-c408-470b-982d-059f1cd4c93e\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.116789 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dz2fg\" (UniqueName: \"kubernetes.io/projected/717f061d-c408-470b-982d-059f1cd4c93e-kube-api-access-dz2fg\") pod \"717f061d-c408-470b-982d-059f1cd4c93e\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.116821 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-combined-ca-bundle\") pod \"717f061d-c408-470b-982d-059f1cd4c93e\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.116951 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/717f061d-c408-470b-982d-059f1cd4c93e-logs\") pod \"717f061d-c408-470b-982d-059f1cd4c93e\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.117017 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-config-data-custom\") pod \"717f061d-c408-470b-982d-059f1cd4c93e\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.117155 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-config-data\") pod \"717f061d-c408-470b-982d-059f1cd4c93e\" (UID: \"717f061d-c408-470b-982d-059f1cd4c93e\") " Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.120862 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/717f061d-c408-470b-982d-059f1cd4c93e-logs" (OuterVolumeSpecName: "logs") pod "717f061d-c408-470b-982d-059f1cd4c93e" (UID: "717f061d-c408-470b-982d-059f1cd4c93e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.131294 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-scripts" (OuterVolumeSpecName: "scripts") pod "717f061d-c408-470b-982d-059f1cd4c93e" (UID: "717f061d-c408-470b-982d-059f1cd4c93e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.131484 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/717f061d-c408-470b-982d-059f1cd4c93e-certs" (OuterVolumeSpecName: "certs") pod "717f061d-c408-470b-982d-059f1cd4c93e" (UID: "717f061d-c408-470b-982d-059f1cd4c93e"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.131461 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "717f061d-c408-470b-982d-059f1cd4c93e" (UID: "717f061d-c408-470b-982d-059f1cd4c93e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.132363 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/717f061d-c408-470b-982d-059f1cd4c93e-kube-api-access-dz2fg" (OuterVolumeSpecName: "kube-api-access-dz2fg") pod "717f061d-c408-470b-982d-059f1cd4c93e" (UID: "717f061d-c408-470b-982d-059f1cd4c93e"). InnerVolumeSpecName "kube-api-access-dz2fg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.162323 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-config-data" (OuterVolumeSpecName: "config-data") pod "717f061d-c408-470b-982d-059f1cd4c93e" (UID: "717f061d-c408-470b-982d-059f1cd4c93e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.170857 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "717f061d-c408-470b-982d-059f1cd4c93e" (UID: "717f061d-c408-470b-982d-059f1cd4c93e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.230416 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dz2fg\" (UniqueName: \"kubernetes.io/projected/717f061d-c408-470b-982d-059f1cd4c93e-kube-api-access-dz2fg\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.230473 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.230483 4946 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/717f061d-c408-470b-982d-059f1cd4c93e-logs\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.230494 4946 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.230507 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.230516 4946 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/717f061d-c408-470b-982d-059f1cd4c93e-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.230529 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/717f061d-c408-470b-982d-059f1cd4c93e-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.302813 4946 generic.go:334] "Generic (PLEG): container finished" podID="8e01116c-b2f6-4512-a06e-8c2bb9710fb3" containerID="79bd221dcfa9dfd38a6d062623f7ca01093e48f041e6ec5341e9b5d2be32fded" exitCode=143 Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.303472 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c78d69c8-m42w5" event={"ID":"8e01116c-b2f6-4512-a06e-8c2bb9710fb3","Type":"ContainerDied","Data":"79bd221dcfa9dfd38a6d062623f7ca01093e48f041e6ec5341e9b5d2be32fded"} Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.307231 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c7472ebe-1d3e-4e70-bb29-f86ee17843a1","Type":"ContainerStarted","Data":"fd4e4477ae01835b402ca097541e686490f0e768dbe48567bab5de8bafe8f9da"} Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.325275 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"717f061d-c408-470b-982d-059f1cd4c93e","Type":"ContainerDied","Data":"24926806ce6999ca3721fe0ed76540a9104bcdbf0ae3fe7c2f8dde702ad3a02c"} Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 
15:25:34.325342 4946 scope.go:117] "RemoveContainer" containerID="85d2f3eb50387172b619e3e40cf7400587256d7a960fd108d3d7ce590875c200" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.325490 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.335664 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"aa433d65-79b3-4465-86db-4d631aed9396","Type":"ContainerStarted","Data":"48532c332fed1b91986863e8ae72b7652f9a17fc3aa3ac73a7472a92aff26f4e"} Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.351988 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fb133f81-1fe2-4e36-8663-8301e9373627","Type":"ContainerStarted","Data":"3c40c72e18dff80747c14ceca3faf43ab03bee9ececa50b38563336c32a4acca"} Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.366090 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=3.301802306 podStartE2EDuration="7.366068345s" podCreationTimestamp="2025-12-04 15:25:27 +0000 UTC" firstStartedPulling="2025-12-04 15:25:28.685347582 +0000 UTC m=+1379.571391223" lastFinishedPulling="2025-12-04 15:25:32.749613631 +0000 UTC m=+1383.635657262" observedRunningTime="2025-12-04 15:25:34.360362351 +0000 UTC m=+1385.246405992" watchObservedRunningTime="2025-12-04 15:25:34.366068345 +0000 UTC m=+1385.252111986" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.412210 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.430405 4946 scope.go:117] "RemoveContainer" containerID="b682c638c90256015b275bada9fe1818f3c55c135acb03afea9929bd1209c7d4" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.441194 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.466337 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.479423 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"] Dec 04 15:25:34 crc kubenswrapper[4946]: E1204 15:25:34.480165 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="717f061d-c408-470b-982d-059f1cd4c93e" containerName="cloudkitty-api" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.480185 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="717f061d-c408-470b-982d-059f1cd4c93e" containerName="cloudkitty-api" Dec 04 15:25:34 crc kubenswrapper[4946]: E1204 15:25:34.480216 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="717f061d-c408-470b-982d-059f1cd4c93e" containerName="cloudkitty-api-log" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.480223 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="717f061d-c408-470b-982d-059f1cd4c93e" containerName="cloudkitty-api-log" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.480504 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="717f061d-c408-470b-982d-059f1cd4c93e" containerName="cloudkitty-api-log" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.480522 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="717f061d-c408-470b-982d-059f1cd4c93e" containerName="cloudkitty-api" Dec 04 15:25:34 crc 
kubenswrapper[4946]: I1204 15:25:34.482161 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.488019 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.489326 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-internal-svc" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.489806 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-public-svc" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.535364 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.653367 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.653439 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-certs\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.653480 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-config-data\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.653524 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njv2d\" (UniqueName: \"kubernetes.io/projected/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-kube-api-access-njv2d\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.653583 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.653624 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.653640 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-logs\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.653665 4946 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-scripts\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.653693 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.756501 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-config-data\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.756947 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njv2d\" (UniqueName: \"kubernetes.io/projected/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-kube-api-access-njv2d\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.757017 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.757055 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.757078 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-logs\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.757108 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-scripts\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.757154 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.757231 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: 
\"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.757267 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-certs\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.758349 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-logs\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.763606 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-config-data\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.788155 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-certs\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.788624 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.791975 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.793521 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.797269 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-scripts\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.797305 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njv2d\" (UniqueName: \"kubernetes.io/projected/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-kube-api-access-njv2d\") pod \"cloudkitty-api-0\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.798838 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: 
\"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " pod="openstack/cloudkitty-api-0" Dec 04 15:25:34 crc kubenswrapper[4946]: I1204 15:25:34.841963 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 04 15:25:35 crc kubenswrapper[4946]: I1204 15:25:35.393352 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fb133f81-1fe2-4e36-8663-8301e9373627","Type":"ContainerStarted","Data":"b157ba2790d0047050ddac2aa3e37a144a37156843cc60c2fc86e4e926bbbdef"} Dec 04 15:25:35 crc kubenswrapper[4946]: I1204 15:25:35.404413 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c7472ebe-1d3e-4e70-bb29-f86ee17843a1","Type":"ContainerStarted","Data":"52ed8f8114e0ab3d840038dd25397c4b6473749bc94df28426b955b2aa4b1b36"} Dec 04 15:25:35 crc kubenswrapper[4946]: I1204 15:25:35.521264 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="717f061d-c408-470b-982d-059f1cd4c93e" path="/var/lib/kubelet/pods/717f061d-c408-470b-982d-059f1cd4c93e/volumes" Dec 04 15:25:35 crc kubenswrapper[4946]: I1204 15:25:35.522407 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.546223 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fb133f81-1fe2-4e36-8663-8301e9373627","Type":"ContainerStarted","Data":"c0883e750da72cf670c22e75a2383b5285f9658bc21a5b1ef525129729b34ee5"} Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.579679 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="aa433d65-79b3-4465-86db-4d631aed9396" containerName="cloudkitty-proc" containerID="cri-o://48532c332fed1b91986863e8ae72b7652f9a17fc3aa3ac73a7472a92aff26f4e" gracePeriod=30 Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.579878 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e","Type":"ContainerStarted","Data":"20c6a5c355ad98a2f16639306aa8b40b10417c10db02f2eabdd0e1c6b4cb4c19"} Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.579908 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e","Type":"ContainerStarted","Data":"ae34b3ada987931401267c423cd54fb8fdae51a8c23a4b9495543cfc56796f5e"} Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.592706 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.592679303 podStartE2EDuration="5.592679303s" podCreationTimestamp="2025-12-04 15:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:25:36.584659067 +0000 UTC m=+1387.470702708" watchObservedRunningTime="2025-12-04 15:25:36.592679303 +0000 UTC m=+1387.478722944" Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.645847 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.647713 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.652662 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.652882 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.671216 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-dlp9z" Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.692358 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.753243 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.755801 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5c0e428-98ad-4bda-aba1-685f1b5c8009-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b5c0e428-98ad-4bda-aba1-685f1b5c8009\") " pod="openstack/openstackclient" Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.756109 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4w482\" (UniqueName: \"kubernetes.io/projected/b5c0e428-98ad-4bda-aba1-685f1b5c8009-kube-api-access-4w482\") pod \"openstackclient\" (UID: \"b5c0e428-98ad-4bda-aba1-685f1b5c8009\") " pod="openstack/openstackclient" Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.770494 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b5c0e428-98ad-4bda-aba1-685f1b5c8009-openstack-config\") pod \"openstackclient\" (UID: \"b5c0e428-98ad-4bda-aba1-685f1b5c8009\") " pod="openstack/openstackclient" Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.770569 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b5c0e428-98ad-4bda-aba1-685f1b5c8009-openstack-config-secret\") pod \"openstackclient\" (UID: \"b5c0e428-98ad-4bda-aba1-685f1b5c8009\") " pod="openstack/openstackclient" Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.872574 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4w482\" (UniqueName: \"kubernetes.io/projected/b5c0e428-98ad-4bda-aba1-685f1b5c8009-kube-api-access-4w482\") pod \"openstackclient\" (UID: \"b5c0e428-98ad-4bda-aba1-685f1b5c8009\") " pod="openstack/openstackclient" Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.873360 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b5c0e428-98ad-4bda-aba1-685f1b5c8009-openstack-config\") pod \"openstackclient\" (UID: \"b5c0e428-98ad-4bda-aba1-685f1b5c8009\") " pod="openstack/openstackclient" Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.873458 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b5c0e428-98ad-4bda-aba1-685f1b5c8009-openstack-config-secret\") pod \"openstackclient\" (UID: 
\"b5c0e428-98ad-4bda-aba1-685f1b5c8009\") " pod="openstack/openstackclient" Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.873548 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5c0e428-98ad-4bda-aba1-685f1b5c8009-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b5c0e428-98ad-4bda-aba1-685f1b5c8009\") " pod="openstack/openstackclient" Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.877313 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b5c0e428-98ad-4bda-aba1-685f1b5c8009-openstack-config\") pod \"openstackclient\" (UID: \"b5c0e428-98ad-4bda-aba1-685f1b5c8009\") " pod="openstack/openstackclient" Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.928034 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4w482\" (UniqueName: \"kubernetes.io/projected/b5c0e428-98ad-4bda-aba1-685f1b5c8009-kube-api-access-4w482\") pod \"openstackclient\" (UID: \"b5c0e428-98ad-4bda-aba1-685f1b5c8009\") " pod="openstack/openstackclient" Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.928407 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b5c0e428-98ad-4bda-aba1-685f1b5c8009-openstack-config-secret\") pod \"openstackclient\" (UID: \"b5c0e428-98ad-4bda-aba1-685f1b5c8009\") " pod="openstack/openstackclient" Dec 04 15:25:36 crc kubenswrapper[4946]: I1204 15:25:36.934807 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5c0e428-98ad-4bda-aba1-685f1b5c8009-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b5c0e428-98ad-4bda-aba1-685f1b5c8009\") " pod="openstack/openstackclient" Dec 04 15:25:37 crc kubenswrapper[4946]: I1204 15:25:37.000687 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 04 15:25:37 crc kubenswrapper[4946]: I1204 15:25:37.458974 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5c78d69c8-m42w5" podUID="8e01116c-b2f6-4512-a06e-8c2bb9710fb3" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.177:9311/healthcheck\": dial tcp 10.217.0.177:9311: connect: connection refused" Dec 04 15:25:37 crc kubenswrapper[4946]: I1204 15:25:37.459668 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5c78d69c8-m42w5" podUID="8e01116c-b2f6-4512-a06e-8c2bb9710fb3" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.177:9311/healthcheck\": dial tcp 10.217.0.177:9311: connect: connection refused" Dec 04 15:25:37 crc kubenswrapper[4946]: I1204 15:25:37.680892 4946 generic.go:334] "Generic (PLEG): container finished" podID="8e01116c-b2f6-4512-a06e-8c2bb9710fb3" containerID="c5b65fdc664524ce0d0ef7529d4feff4767b17aa21281ae9efef589cf3366a2c" exitCode=0 Dec 04 15:25:37 crc kubenswrapper[4946]: I1204 15:25:37.681006 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c78d69c8-m42w5" event={"ID":"8e01116c-b2f6-4512-a06e-8c2bb9710fb3","Type":"ContainerDied","Data":"c5b65fdc664524ce0d0ef7529d4feff4767b17aa21281ae9efef589cf3366a2c"} Dec 04 15:25:37 crc kubenswrapper[4946]: I1204 15:25:37.719067 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e","Type":"ContainerStarted","Data":"eef3b33fa7fae5fe286a436c2342eeaa25e665d5c4f013a1896cca4e902fff38"} Dec 04 15:25:37 crc kubenswrapper[4946]: I1204 15:25:37.720334 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0" Dec 04 15:25:37 crc kubenswrapper[4946]: I1204 15:25:37.774333 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=3.774306413 podStartE2EDuration="3.774306413s" podCreationTimestamp="2025-12-04 15:25:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:25:37.763406579 +0000 UTC m=+1388.649450240" watchObservedRunningTime="2025-12-04 15:25:37.774306413 +0000 UTC m=+1388.660350064" Dec 04 15:25:37 crc kubenswrapper[4946]: I1204 15:25:37.785030 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 04 15:25:37 crc kubenswrapper[4946]: I1204 15:25:37.792701 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c7472ebe-1d3e-4e70-bb29-f86ee17843a1","Type":"ContainerStarted","Data":"ac59a78316d9af6cbb656acb076c2685eb11be8ac467ff688659bf2f1908a8bf"} Dec 04 15:25:37 crc kubenswrapper[4946]: I1204 15:25:37.830253 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:25:37 crc kubenswrapper[4946]: W1204 15:25:37.850312 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5c0e428_98ad_4bda_aba1_685f1b5c8009.slice/crio-c5565a544934019988da1afdfd20c54187bb3f3d6a055682f5b4f1c203c5d157 WatchSource:0}: Error finding container c5565a544934019988da1afdfd20c54187bb3f3d6a055682f5b4f1c203c5d157: Status 404 returned error can't find the container with id 
c5565a544934019988da1afdfd20c54187bb3f3d6a055682f5b4f1c203c5d157 Dec 04 15:25:37 crc kubenswrapper[4946]: I1204 15:25:37.942666 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-msmr9"] Dec 04 15:25:37 crc kubenswrapper[4946]: I1204 15:25:37.947722 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" podUID="e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2" containerName="dnsmasq-dns" containerID="cri-o://d73f51f2bfc21958963cbd4bd41d0d72350ef5cac03b0b891b42ebbcc75a7399" gracePeriod=10 Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.731029 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.832573 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c78d69c8-m42w5" event={"ID":"8e01116c-b2f6-4512-a06e-8c2bb9710fb3","Type":"ContainerDied","Data":"01cdcdd681fb7bc4ba169c2bd7d6fce08220fa858f945630e45d81209e64527f"} Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.832635 4946 scope.go:117] "RemoveContainer" containerID="c5b65fdc664524ce0d0ef7529d4feff4767b17aa21281ae9efef589cf3366a2c" Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.832924 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5c78d69c8-m42w5" Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.858098 4946 generic.go:334] "Generic (PLEG): container finished" podID="e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2" containerID="d73f51f2bfc21958963cbd4bd41d0d72350ef5cac03b0b891b42ebbcc75a7399" exitCode=0 Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.858237 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" event={"ID":"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2","Type":"ContainerDied","Data":"d73f51f2bfc21958963cbd4bd41d0d72350ef5cac03b0b891b42ebbcc75a7399"} Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.858268 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" event={"ID":"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2","Type":"ContainerDied","Data":"1a848226f03d18b8add738482f7fdcec3a7d584c40d0cbae96a5aa13f3ee7843"} Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.858283 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a848226f03d18b8add738482f7fdcec3a7d584c40d0cbae96a5aa13f3ee7843" Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.859538 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-config-data\") pod \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.859674 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-config-data-custom\") pod \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.859738 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrm4k\" (UniqueName: \"kubernetes.io/projected/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-kube-api-access-jrm4k\") pod 
\"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.859981 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-logs\") pod \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.860030 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-combined-ca-bundle\") pod \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\" (UID: \"8e01116c-b2f6-4512-a06e-8c2bb9710fb3\") " Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.862651 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-logs" (OuterVolumeSpecName: "logs") pod "8e01116c-b2f6-4512-a06e-8c2bb9710fb3" (UID: "8e01116c-b2f6-4512-a06e-8c2bb9710fb3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.868587 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-kube-api-access-jrm4k" (OuterVolumeSpecName: "kube-api-access-jrm4k") pod "8e01116c-b2f6-4512-a06e-8c2bb9710fb3" (UID: "8e01116c-b2f6-4512-a06e-8c2bb9710fb3"). InnerVolumeSpecName "kube-api-access-jrm4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.889884 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8e01116c-b2f6-4512-a06e-8c2bb9710fb3" (UID: "8e01116c-b2f6-4512-a06e-8c2bb9710fb3"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.890566 4946 scope.go:117] "RemoveContainer" containerID="79bd221dcfa9dfd38a6d062623f7ca01093e48f041e6ec5341e9b5d2be32fded" Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.894393 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.898810 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c7472ebe-1d3e-4e70-bb29-f86ee17843a1","Type":"ContainerStarted","Data":"1a8a667659d031009bf0d47328731ec7295d9fa242a22987f925505067d1c0bc"} Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.911967 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"b5c0e428-98ad-4bda-aba1-685f1b5c8009","Type":"ContainerStarted","Data":"c5565a544934019988da1afdfd20c54187bb3f3d6a055682f5b4f1c203c5d157"} Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.952984 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8e01116c-b2f6-4512-a06e-8c2bb9710fb3" (UID: "8e01116c-b2f6-4512-a06e-8c2bb9710fb3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.964352 4946 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.964399 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrm4k\" (UniqueName: \"kubernetes.io/projected/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-kube-api-access-jrm4k\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.964414 4946 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-logs\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:38 crc kubenswrapper[4946]: I1204 15:25:38.964431 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.043309 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-config-data" (OuterVolumeSpecName: "config-data") pod "8e01116c-b2f6-4512-a06e-8c2bb9710fb3" (UID: "8e01116c-b2f6-4512-a06e-8c2bb9710fb3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.070665 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-dns-swift-storage-0\") pod \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.070839 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pb2zf\" (UniqueName: \"kubernetes.io/projected/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-kube-api-access-pb2zf\") pod \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.070912 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-dns-svc\") pod \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.070941 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-ovsdbserver-nb\") pod \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.070987 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-config\") pod \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.071056 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-ovsdbserver-sb\") pod \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\" (UID: \"e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2\") " Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.071754 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e01116c-b2f6-4512-a06e-8c2bb9710fb3-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.091362 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-kube-api-access-pb2zf" (OuterVolumeSpecName: "kube-api-access-pb2zf") pod "e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2" (UID: "e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2"). InnerVolumeSpecName "kube-api-access-pb2zf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.176128 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pb2zf\" (UniqueName: \"kubernetes.io/projected/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-kube-api-access-pb2zf\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.185558 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-config" (OuterVolumeSpecName: "config") pod "e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2" (UID: "e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.217281 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2" (UID: "e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.234998 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5c78d69c8-m42w5"] Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.243469 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5c78d69c8-m42w5"] Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.295756 4946 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.295818 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.298828 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2" (UID: "e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.303365 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2" (UID: "e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.313022 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2" (UID: "e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.399405 4946 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.399452 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.399470 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.469362 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e01116c-b2f6-4512-a06e-8c2bb9710fb3" path="/var/lib/kubelet/pods/8e01116c-b2f6-4512-a06e-8c2bb9710fb3/volumes" Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.632305 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.180:8776/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.937165 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-msmr9" Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.969495 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-msmr9"] Dec 04 15:25:39 crc kubenswrapper[4946]: I1204 15:25:39.984746 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-msmr9"] Dec 04 15:25:41 crc kubenswrapper[4946]: I1204 15:25:41.475671 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2" path="/var/lib/kubelet/pods/e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2/volumes" Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.002091 4946 generic.go:334] "Generic (PLEG): container finished" podID="aa433d65-79b3-4465-86db-4d631aed9396" containerID="48532c332fed1b91986863e8ae72b7652f9a17fc3aa3ac73a7472a92aff26f4e" exitCode=0 Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.002175 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"aa433d65-79b3-4465-86db-4d631aed9396","Type":"ContainerDied","Data":"48532c332fed1b91986863e8ae72b7652f9a17fc3aa3ac73a7472a92aff26f4e"} Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.184138 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.424581 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.591351 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-combined-ca-bundle\") pod \"aa433d65-79b3-4465-86db-4d631aed9396\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.591810 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jx4pt\" (UniqueName: \"kubernetes.io/projected/aa433d65-79b3-4465-86db-4d631aed9396-kube-api-access-jx4pt\") pod \"aa433d65-79b3-4465-86db-4d631aed9396\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.591880 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-config-data\") pod \"aa433d65-79b3-4465-86db-4d631aed9396\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.591937 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-scripts\") pod \"aa433d65-79b3-4465-86db-4d631aed9396\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.592052 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/aa433d65-79b3-4465-86db-4d631aed9396-certs\") pod \"aa433d65-79b3-4465-86db-4d631aed9396\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.592181 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-config-data-custom\") pod \"aa433d65-79b3-4465-86db-4d631aed9396\" (UID: \"aa433d65-79b3-4465-86db-4d631aed9396\") " Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.598544 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa433d65-79b3-4465-86db-4d631aed9396-certs" (OuterVolumeSpecName: "certs") pod "aa433d65-79b3-4465-86db-4d631aed9396" (UID: "aa433d65-79b3-4465-86db-4d631aed9396"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.600196 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "aa433d65-79b3-4465-86db-4d631aed9396" (UID: "aa433d65-79b3-4465-86db-4d631aed9396"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.600339 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa433d65-79b3-4465-86db-4d631aed9396-kube-api-access-jx4pt" (OuterVolumeSpecName: "kube-api-access-jx4pt") pod "aa433d65-79b3-4465-86db-4d631aed9396" (UID: "aa433d65-79b3-4465-86db-4d631aed9396"). InnerVolumeSpecName "kube-api-access-jx4pt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.600650 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-scripts" (OuterVolumeSpecName: "scripts") pod "aa433d65-79b3-4465-86db-4d631aed9396" (UID: "aa433d65-79b3-4465-86db-4d631aed9396"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.639179 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa433d65-79b3-4465-86db-4d631aed9396" (UID: "aa433d65-79b3-4465-86db-4d631aed9396"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.645570 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-config-data" (OuterVolumeSpecName: "config-data") pod "aa433d65-79b3-4465-86db-4d631aed9396" (UID: "aa433d65-79b3-4465-86db-4d631aed9396"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.695217 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.695268 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jx4pt\" (UniqueName: \"kubernetes.io/projected/aa433d65-79b3-4465-86db-4d631aed9396-kube-api-access-jx4pt\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.695284 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.695292 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.695302 4946 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/aa433d65-79b3-4465-86db-4d631aed9396-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:42 crc kubenswrapper[4946]: I1204 15:25:42.695310 4946 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa433d65-79b3-4465-86db-4d631aed9396-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.048321 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.048672 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"aa433d65-79b3-4465-86db-4d631aed9396","Type":"ContainerDied","Data":"d59ec63d3f5b6443fe9adcb3b888f0f97cbd247a310ec7c915de7eb8d67ddf8a"} Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.048744 4946 scope.go:117] "RemoveContainer" containerID="48532c332fed1b91986863e8ae72b7652f9a17fc3aa3ac73a7472a92aff26f4e" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.098036 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c7472ebe-1d3e-4e70-bb29-f86ee17843a1","Type":"ContainerStarted","Data":"cab4df4dfd92a18cd7252d7599c7957a030ebc3044545bb0669882310ada8fb6"} Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.110638 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.141177 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.221152729 podStartE2EDuration="12.141160262s" podCreationTimestamp="2025-12-04 15:25:31 +0000 UTC" firstStartedPulling="2025-12-04 15:25:34.021468022 +0000 UTC m=+1384.907511663" lastFinishedPulling="2025-12-04 15:25:41.941475555 +0000 UTC m=+1392.827519196" observedRunningTime="2025-12-04 15:25:43.137719759 +0000 UTC m=+1394.023763400" watchObservedRunningTime="2025-12-04 15:25:43.141160262 +0000 UTC m=+1394.027203893" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.229836 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 04 15:25:43 crc 
kubenswrapper[4946]: I1204 15:25:43.287252 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.342193 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 04 15:25:43 crc kubenswrapper[4946]: E1204 15:25:43.342804 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e01116c-b2f6-4512-a06e-8c2bb9710fb3" containerName="barbican-api-log" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.342826 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e01116c-b2f6-4512-a06e-8c2bb9710fb3" containerName="barbican-api-log" Dec 04 15:25:43 crc kubenswrapper[4946]: E1204 15:25:43.342843 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa433d65-79b3-4465-86db-4d631aed9396" containerName="cloudkitty-proc" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.342850 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa433d65-79b3-4465-86db-4d631aed9396" containerName="cloudkitty-proc" Dec 04 15:25:43 crc kubenswrapper[4946]: E1204 15:25:43.342872 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e01116c-b2f6-4512-a06e-8c2bb9710fb3" containerName="barbican-api" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.342878 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e01116c-b2f6-4512-a06e-8c2bb9710fb3" containerName="barbican-api" Dec 04 15:25:43 crc kubenswrapper[4946]: E1204 15:25:43.342893 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2" containerName="dnsmasq-dns" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.342899 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2" containerName="dnsmasq-dns" Dec 04 15:25:43 crc kubenswrapper[4946]: E1204 15:25:43.342909 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2" containerName="init" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.342915 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2" containerName="init" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.343158 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="e996f27c-cd9b-4c7d-bf35-ed9e02f5e2b2" containerName="dnsmasq-dns" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.343175 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa433d65-79b3-4465-86db-4d631aed9396" containerName="cloudkitty-proc" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.343191 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e01116c-b2f6-4512-a06e-8c2bb9710fb3" containerName="barbican-api-log" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.343198 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e01116c-b2f6-4512-a06e-8c2bb9710fb3" containerName="barbican-api" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.345366 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.348703 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.364499 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.414230 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-config-data\") pod \"cloudkitty-proc-0\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.414348 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.414415 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbpz5\" (UniqueName: \"kubernetes.io/projected/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-kube-api-access-gbpz5\") pod \"cloudkitty-proc-0\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.414523 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-certs\") pod \"cloudkitty-proc-0\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.414575 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.414644 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-scripts\") pod \"cloudkitty-proc-0\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.486216 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa433d65-79b3-4465-86db-4d631aed9396" path="/var/lib/kubelet/pods/aa433d65-79b3-4465-86db-4d631aed9396/volumes" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.516820 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbpz5\" (UniqueName: \"kubernetes.io/projected/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-kube-api-access-gbpz5\") pod \"cloudkitty-proc-0\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.516889 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-certs\") pod 
\"cloudkitty-proc-0\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.516910 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.516938 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-scripts\") pod \"cloudkitty-proc-0\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.517032 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-config-data\") pod \"cloudkitty-proc-0\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.517086 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.539828 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-scripts\") pod \"cloudkitty-proc-0\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.539863 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.543619 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-config-data\") pod \"cloudkitty-proc-0\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.555871 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbpz5\" (UniqueName: \"kubernetes.io/projected/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-kube-api-access-gbpz5\") pod \"cloudkitty-proc-0\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.556037 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.556348 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: 
\"kubernetes.io/projected/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-certs\") pod \"cloudkitty-proc-0\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:25:43 crc kubenswrapper[4946]: E1204 15:25:43.564063 4946 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa433d65_79b3_4465_86db_4d631aed9396.slice/crio-d59ec63d3f5b6443fe9adcb3b888f0f97cbd247a310ec7c915de7eb8d67ddf8a\": RecentStats: unable to find data in memory cache]" Dec 04 15:25:43 crc kubenswrapper[4946]: I1204 15:25:43.697585 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 04 15:25:44 crc kubenswrapper[4946]: I1204 15:25:44.249836 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 04 15:25:44 crc kubenswrapper[4946]: W1204 15:25:44.254540 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb21e37e6_3f4e_497d_9b6f_1f11f9d284a8.slice/crio-946aab3ec23fa8abc17be2005dc05ac84deb8aa761aff5fe6ca065b532ea9b69 WatchSource:0}: Error finding container 946aab3ec23fa8abc17be2005dc05ac84deb8aa761aff5fe6ca065b532ea9b69: Status 404 returned error can't find the container with id 946aab3ec23fa8abc17be2005dc05ac84deb8aa761aff5fe6ca065b532ea9b69 Dec 04 15:25:45 crc kubenswrapper[4946]: I1204 15:25:45.158760 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8","Type":"ContainerStarted","Data":"946aab3ec23fa8abc17be2005dc05ac84deb8aa761aff5fe6ca065b532ea9b69"} Dec 04 15:25:46 crc kubenswrapper[4946]: I1204 15:25:46.183800 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8","Type":"ContainerStarted","Data":"dd5e41737b51f633aaca90f9ecaa0b928cce92138cf784d39258af4f082fc0f5"} Dec 04 15:25:46 crc kubenswrapper[4946]: I1204 15:25:46.213980 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=3.213951524 podStartE2EDuration="3.213951524s" podCreationTimestamp="2025-12-04 15:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:25:46.208553579 +0000 UTC m=+1397.094597220" watchObservedRunningTime="2025-12-04 15:25:46.213951524 +0000 UTC m=+1397.099995165" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.473003 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-8679d7877f-2wbn9"] Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.475452 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.480474 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.481173 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.488314 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-8679d7877f-2wbn9"] Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.497621 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.565007 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bec308a1-7b44-4153-a863-7b9755407899-combined-ca-bundle\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.565532 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bec308a1-7b44-4153-a863-7b9755407899-public-tls-certs\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.565650 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dzvh\" (UniqueName: \"kubernetes.io/projected/bec308a1-7b44-4153-a863-7b9755407899-kube-api-access-9dzvh\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.566064 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bec308a1-7b44-4153-a863-7b9755407899-internal-tls-certs\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.566372 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bec308a1-7b44-4153-a863-7b9755407899-etc-swift\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.566446 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bec308a1-7b44-4153-a863-7b9755407899-log-httpd\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.566510 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bec308a1-7b44-4153-a863-7b9755407899-config-data\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " 
pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.566668 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bec308a1-7b44-4153-a863-7b9755407899-run-httpd\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.670075 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bec308a1-7b44-4153-a863-7b9755407899-internal-tls-certs\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.670494 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bec308a1-7b44-4153-a863-7b9755407899-etc-swift\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.670597 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bec308a1-7b44-4153-a863-7b9755407899-log-httpd\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.670698 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bec308a1-7b44-4153-a863-7b9755407899-config-data\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.670818 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bec308a1-7b44-4153-a863-7b9755407899-run-httpd\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.671155 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bec308a1-7b44-4153-a863-7b9755407899-combined-ca-bundle\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.671341 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bec308a1-7b44-4153-a863-7b9755407899-public-tls-certs\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.671494 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dzvh\" (UniqueName: \"kubernetes.io/projected/bec308a1-7b44-4153-a863-7b9755407899-kube-api-access-9dzvh\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 
15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.671775 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bec308a1-7b44-4153-a863-7b9755407899-log-httpd\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.672506 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bec308a1-7b44-4153-a863-7b9755407899-run-httpd\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.679747 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bec308a1-7b44-4153-a863-7b9755407899-config-data\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.683546 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bec308a1-7b44-4153-a863-7b9755407899-internal-tls-certs\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.684319 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bec308a1-7b44-4153-a863-7b9755407899-combined-ca-bundle\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.717065 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bec308a1-7b44-4153-a863-7b9755407899-etc-swift\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.722927 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bec308a1-7b44-4153-a863-7b9755407899-public-tls-certs\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.728268 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dzvh\" (UniqueName: \"kubernetes.io/projected/bec308a1-7b44-4153-a863-7b9755407899-kube-api-access-9dzvh\") pod \"swift-proxy-8679d7877f-2wbn9\" (UID: \"bec308a1-7b44-4153-a863-7b9755407899\") " pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:48 crc kubenswrapper[4946]: I1204 15:25:48.813995 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:25:49 crc kubenswrapper[4946]: I1204 15:25:49.577759 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:25:49 crc kubenswrapper[4946]: I1204 15:25:49.578796 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerName="proxy-httpd" containerID="cri-o://cab4df4dfd92a18cd7252d7599c7957a030ebc3044545bb0669882310ada8fb6" gracePeriod=30 Dec 04 15:25:49 crc kubenswrapper[4946]: I1204 15:25:49.578909 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerName="sg-core" containerID="cri-o://1a8a667659d031009bf0d47328731ec7295d9fa242a22987f925505067d1c0bc" gracePeriod=30 Dec 04 15:25:49 crc kubenswrapper[4946]: I1204 15:25:49.578713 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerName="ceilometer-central-agent" containerID="cri-o://52ed8f8114e0ab3d840038dd25397c4b6473749bc94df28426b955b2aa4b1b36" gracePeriod=30 Dec 04 15:25:49 crc kubenswrapper[4946]: I1204 15:25:49.578989 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerName="ceilometer-notification-agent" containerID="cri-o://ac59a78316d9af6cbb656acb076c2685eb11be8ac467ff688659bf2f1908a8bf" gracePeriod=30 Dec 04 15:25:50 crc kubenswrapper[4946]: I1204 15:25:50.255330 4946 generic.go:334] "Generic (PLEG): container finished" podID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerID="cab4df4dfd92a18cd7252d7599c7957a030ebc3044545bb0669882310ada8fb6" exitCode=0 Dec 04 15:25:50 crc kubenswrapper[4946]: I1204 15:25:50.255391 4946 generic.go:334] "Generic (PLEG): container finished" podID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerID="1a8a667659d031009bf0d47328731ec7295d9fa242a22987f925505067d1c0bc" exitCode=2 Dec 04 15:25:50 crc kubenswrapper[4946]: I1204 15:25:50.255421 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c7472ebe-1d3e-4e70-bb29-f86ee17843a1","Type":"ContainerDied","Data":"cab4df4dfd92a18cd7252d7599c7957a030ebc3044545bb0669882310ada8fb6"} Dec 04 15:25:50 crc kubenswrapper[4946]: I1204 15:25:50.255479 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c7472ebe-1d3e-4e70-bb29-f86ee17843a1","Type":"ContainerDied","Data":"1a8a667659d031009bf0d47328731ec7295d9fa242a22987f925505067d1c0bc"} Dec 04 15:25:51 crc kubenswrapper[4946]: I1204 15:25:51.275326 4946 generic.go:334] "Generic (PLEG): container finished" podID="9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" containerID="0127d815e1b4411476f87ac6c09fd13ed369b5ffa64e9597645717208c7168d9" exitCode=137 Dec 04 15:25:51 crc kubenswrapper[4946]: I1204 15:25:51.275404 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd","Type":"ContainerDied","Data":"0127d815e1b4411476f87ac6c09fd13ed369b5ffa64e9597645717208c7168d9"} Dec 04 15:25:51 crc kubenswrapper[4946]: I1204 15:25:51.280738 4946 generic.go:334] "Generic (PLEG): container finished" podID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerID="52ed8f8114e0ab3d840038dd25397c4b6473749bc94df28426b955b2aa4b1b36" exitCode=0 Dec 04 15:25:51 crc 
kubenswrapper[4946]: I1204 15:25:51.280799 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c7472ebe-1d3e-4e70-bb29-f86ee17843a1","Type":"ContainerDied","Data":"52ed8f8114e0ab3d840038dd25397c4b6473749bc94df28426b955b2aa4b1b36"} Dec 04 15:25:52 crc kubenswrapper[4946]: I1204 15:25:52.295713 4946 generic.go:334] "Generic (PLEG): container finished" podID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerID="ac59a78316d9af6cbb656acb076c2685eb11be8ac467ff688659bf2f1908a8bf" exitCode=0 Dec 04 15:25:52 crc kubenswrapper[4946]: I1204 15:25:52.295784 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c7472ebe-1d3e-4e70-bb29-f86ee17843a1","Type":"ContainerDied","Data":"ac59a78316d9af6cbb656acb076c2685eb11be8ac467ff688659bf2f1908a8bf"} Dec 04 15:25:52 crc kubenswrapper[4946]: I1204 15:25:52.478585 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:25:52 crc kubenswrapper[4946]: I1204 15:25:52.478671 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:25:54 crc kubenswrapper[4946]: I1204 15:25:54.577024 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.180:8776/healthcheck\": dial tcp 10.217.0.180:8776: connect: connection refused" Dec 04 15:25:55 crc kubenswrapper[4946]: I1204 15:25:55.765338 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 15:25:55 crc kubenswrapper[4946]: I1204 15:25:55.766045 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="20d2aa22-a98c-483e-b74a-dd549ec45640" containerName="glance-log" containerID="cri-o://2012190c874c38dc1fcf608c073022d9d04b51c5f1049415ab6b9593e4e9b297" gracePeriod=30 Dec 04 15:25:55 crc kubenswrapper[4946]: I1204 15:25:55.766606 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="20d2aa22-a98c-483e-b74a-dd549ec45640" containerName="glance-httpd" containerID="cri-o://0c85f1b8b663d3558698ef8619bbbb0c2bbb53cc3a3c084c0f23d12eb3f52652" gracePeriod=30 Dec 04 15:25:55 crc kubenswrapper[4946]: I1204 15:25:55.785353 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="20d2aa22-a98c-483e-b74a-dd549ec45640" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.170:9292/healthcheck\": EOF" Dec 04 15:25:55 crc kubenswrapper[4946]: I1204 15:25:55.785654 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="20d2aa22-a98c-483e-b74a-dd549ec45640" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.170:9292/healthcheck\": EOF" Dec 04 15:25:56 crc kubenswrapper[4946]: E1204 15:25:56.181300 4946 log.go:32] "PullImage from 
image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified" Dec 04 15:25:56 crc kubenswrapper[4946]: E1204 15:25:56.183269 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:openstackclient,Image:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,Command:[/bin/sleep],Args:[infinity],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nf8h58dh68h5c9hcbhcfh56bh54bh547h76h56ch655h85h654h54fh65dh659h68ch59h7dh5fbh54bh76h5d8hcch5c5h56ch5d7hfch695hc9h546q,ValueFrom:nil,},EnvVar{Name:OS_CLOUD,Value:default,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_CA_CERT,Value:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_HOST,Value:metric-storage-prometheus.openstack.svc,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_PORT,Value:9090,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openstack-config,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/cloudrc,SubPath:cloudrc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4w482,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42401,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42401,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstackclient_openstack(b5c0e428-98ad-4bda-aba1-685f1b5c8009): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 15:25:56 crc kubenswrapper[4946]: E1204 15:25:56.185289 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstackclient" podUID="b5c0e428-98ad-4bda-aba1-685f1b5c8009" Dec 04 15:25:56 crc kubenswrapper[4946]: I1204 15:25:56.404397 4946 generic.go:334] "Generic (PLEG): container finished" podID="20d2aa22-a98c-483e-b74a-dd549ec45640" containerID="2012190c874c38dc1fcf608c073022d9d04b51c5f1049415ab6b9593e4e9b297" exitCode=143 Dec 04 15:25:56 crc kubenswrapper[4946]: I1204 15:25:56.404479 4946 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"20d2aa22-a98c-483e-b74a-dd549ec45640","Type":"ContainerDied","Data":"2012190c874c38dc1fcf608c073022d9d04b51c5f1049415ab6b9593e4e9b297"} Dec 04 15:25:56 crc kubenswrapper[4946]: E1204 15:25:56.411453 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified\\\"\"" pod="openstack/openstackclient" podUID="b5c0e428-98ad-4bda-aba1-685f1b5c8009" Dec 04 15:25:56 crc kubenswrapper[4946]: I1204 15:25:56.712211 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-8679d7877f-2wbn9"] Dec 04 15:25:56 crc kubenswrapper[4946]: W1204 15:25:56.726026 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbec308a1_7b44_4153_a863_7b9755407899.slice/crio-be7574ad524e9b45fde09262a99ee3fd7b3bdd95c8e78b0b2af2be4e590edf71 WatchSource:0}: Error finding container be7574ad524e9b45fde09262a99ee3fd7b3bdd95c8e78b0b2af2be4e590edf71: Status 404 returned error can't find the container with id be7574ad524e9b45fde09262a99ee3fd7b3bdd95c8e78b0b2af2be4e590edf71 Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.018275 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.050931 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-sg-core-conf-yaml\") pod \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.051067 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-log-httpd\") pod \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.051147 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-config-data\") pod \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.051291 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-scripts\") pod \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.051352 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-combined-ca-bundle\") pod \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.051434 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fdxkw\" (UniqueName: \"kubernetes.io/projected/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-kube-api-access-fdxkw\") pod \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\" (UID: 
\"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.051461 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-run-httpd\") pod \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\" (UID: \"c7472ebe-1d3e-4e70-bb29-f86ee17843a1\") " Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.052334 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c7472ebe-1d3e-4e70-bb29-f86ee17843a1" (UID: "c7472ebe-1d3e-4e70-bb29-f86ee17843a1"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.066350 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c7472ebe-1d3e-4e70-bb29-f86ee17843a1" (UID: "c7472ebe-1d3e-4e70-bb29-f86ee17843a1"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.079841 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-kube-api-access-fdxkw" (OuterVolumeSpecName: "kube-api-access-fdxkw") pod "c7472ebe-1d3e-4e70-bb29-f86ee17843a1" (UID: "c7472ebe-1d3e-4e70-bb29-f86ee17843a1"). InnerVolumeSpecName "kube-api-access-fdxkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.091329 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-scripts" (OuterVolumeSpecName: "scripts") pod "c7472ebe-1d3e-4e70-bb29-f86ee17843a1" (UID: "c7472ebe-1d3e-4e70-bb29-f86ee17843a1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.159006 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fdxkw\" (UniqueName: \"kubernetes.io/projected/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-kube-api-access-fdxkw\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.159049 4946 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.159060 4946 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.159071 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.216449 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c7472ebe-1d3e-4e70-bb29-f86ee17843a1" (UID: "c7472ebe-1d3e-4e70-bb29-f86ee17843a1"). 
InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.230466 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.260423 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbb9w\" (UniqueName: \"kubernetes.io/projected/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-kube-api-access-fbb9w\") pod \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.260614 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-scripts\") pod \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.260733 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-config-data\") pod \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.260802 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-logs\") pod \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.260840 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-combined-ca-bundle\") pod \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.260866 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-config-data-custom\") pod \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.260968 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-etc-machine-id\") pod \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\" (UID: \"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd\") " Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.261582 4946 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.261673 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" (UID: "9f87e8c4-e81e-41a7-afc4-7e1e856aaccd"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.263038 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-logs" (OuterVolumeSpecName: "logs") pod "9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" (UID: "9f87e8c4-e81e-41a7-afc4-7e1e856aaccd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.281581 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-kube-api-access-fbb9w" (OuterVolumeSpecName: "kube-api-access-fbb9w") pod "9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" (UID: "9f87e8c4-e81e-41a7-afc4-7e1e856aaccd"). InnerVolumeSpecName "kube-api-access-fbb9w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.281595 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-scripts" (OuterVolumeSpecName: "scripts") pod "9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" (UID: "9f87e8c4-e81e-41a7-afc4-7e1e856aaccd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.285147 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" (UID: "9f87e8c4-e81e-41a7-afc4-7e1e856aaccd"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.297246 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-config-data" (OuterVolumeSpecName: "config-data") pod "c7472ebe-1d3e-4e70-bb29-f86ee17843a1" (UID: "c7472ebe-1d3e-4e70-bb29-f86ee17843a1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.315240 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" (UID: "9f87e8c4-e81e-41a7-afc4-7e1e856aaccd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.334718 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c7472ebe-1d3e-4e70-bb29-f86ee17843a1" (UID: "c7472ebe-1d3e-4e70-bb29-f86ee17843a1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.338294 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-config-data" (OuterVolumeSpecName: "config-data") pod "9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" (UID: "9f87e8c4-e81e-41a7-afc4-7e1e856aaccd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.363733 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.364215 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbb9w\" (UniqueName: \"kubernetes.io/projected/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-kube-api-access-fbb9w\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.364229 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.364238 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7472ebe-1d3e-4e70-bb29-f86ee17843a1-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.364248 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.364257 4946 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-logs\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.364276 4946 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.364285 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.364294 4946 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.421463 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c7472ebe-1d3e-4e70-bb29-f86ee17843a1","Type":"ContainerDied","Data":"fd4e4477ae01835b402ca097541e686490f0e768dbe48567bab5de8bafe8f9da"} Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.421532 4946 scope.go:117] "RemoveContainer" containerID="cab4df4dfd92a18cd7252d7599c7957a030ebc3044545bb0669882310ada8fb6" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.421735 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.434357 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-8679d7877f-2wbn9" event={"ID":"bec308a1-7b44-4153-a863-7b9755407899","Type":"ContainerStarted","Data":"be7574ad524e9b45fde09262a99ee3fd7b3bdd95c8e78b0b2af2be4e590edf71"} Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.444074 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9f87e8c4-e81e-41a7-afc4-7e1e856aaccd","Type":"ContainerDied","Data":"fa0b47d8979f8341a965e00afe7ff7f4c1576e976667c13aa405159dbc6c1425"} Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.444286 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.548098 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.581197 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.591927 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.614890 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.646249 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:25:57 crc kubenswrapper[4946]: E1204 15:25:57.647083 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerName="ceilometer-central-agent" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.647207 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerName="ceilometer-central-agent" Dec 04 15:25:57 crc kubenswrapper[4946]: E1204 15:25:57.647250 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" containerName="cinder-api-log" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.647261 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" containerName="cinder-api-log" Dec 04 15:25:57 crc kubenswrapper[4946]: E1204 15:25:57.647276 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerName="proxy-httpd" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.647288 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerName="proxy-httpd" Dec 04 15:25:57 crc kubenswrapper[4946]: E1204 15:25:57.647311 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerName="sg-core" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.647320 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerName="sg-core" Dec 04 15:25:57 crc kubenswrapper[4946]: E1204 15:25:57.647341 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerName="ceilometer-notification-agent" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.647352 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" 
containerName="ceilometer-notification-agent" Dec 04 15:25:57 crc kubenswrapper[4946]: E1204 15:25:57.647376 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" containerName="cinder-api" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.647384 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" containerName="cinder-api" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.647663 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerName="proxy-httpd" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.647688 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerName="sg-core" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.647701 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" containerName="cinder-api-log" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.647714 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" containerName="cinder-api" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.647731 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerName="ceilometer-notification-agent" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.647738 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" containerName="ceilometer-central-agent" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.650398 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.653550 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.653551 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.666963 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.673422 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.674624 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-config-data\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.674672 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/511a9cd8-e1dc-4f14-b4df-847fda791a30-run-httpd\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.674704 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.674743 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.674783 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-scripts\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.674868 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/511a9cd8-e1dc-4f14-b4df-847fda791a30-log-httpd\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.674987 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vt95\" (UniqueName: \"kubernetes.io/projected/511a9cd8-e1dc-4f14-b4df-847fda791a30-kube-api-access-7vt95\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.677672 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.677870 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.677976 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.681638 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.687352 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.777183 4946 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0203bca7-1453-4a7b-8597-5286d1d245b2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.777246 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/511a9cd8-e1dc-4f14-b4df-847fda791a30-log-httpd\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.777271 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0203bca7-1453-4a7b-8597-5286d1d245b2-logs\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.777396 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vt95\" (UniqueName: \"kubernetes.io/projected/511a9cd8-e1dc-4f14-b4df-847fda791a30-kube-api-access-7vt95\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.777436 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0203bca7-1453-4a7b-8597-5286d1d245b2-scripts\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.777455 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0203bca7-1453-4a7b-8597-5286d1d245b2-config-data-custom\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.777484 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0203bca7-1453-4a7b-8597-5286d1d245b2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.777500 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-config-data\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.777522 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0203bca7-1453-4a7b-8597-5286d1d245b2-config-data\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.777546 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/511a9cd8-e1dc-4f14-b4df-847fda791a30-run-httpd\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 
crc kubenswrapper[4946]: I1204 15:25:57.777585 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.777620 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6t8f5\" (UniqueName: \"kubernetes.io/projected/0203bca7-1453-4a7b-8597-5286d1d245b2-kube-api-access-6t8f5\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.777644 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.777665 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0203bca7-1453-4a7b-8597-5286d1d245b2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.777690 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-scripts\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.777713 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0203bca7-1453-4a7b-8597-5286d1d245b2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.777837 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/511a9cd8-e1dc-4f14-b4df-847fda791a30-log-httpd\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.778111 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/511a9cd8-e1dc-4f14-b4df-847fda791a30-run-httpd\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.783185 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.783359 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-scripts\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " 
pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.803434 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.803667 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-config-data\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.806806 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vt95\" (UniqueName: \"kubernetes.io/projected/511a9cd8-e1dc-4f14-b4df-847fda791a30-kube-api-access-7vt95\") pod \"ceilometer-0\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.836662 4946 scope.go:117] "RemoveContainer" containerID="1a8a667659d031009bf0d47328731ec7295d9fa242a22987f925505067d1c0bc" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.879661 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0203bca7-1453-4a7b-8597-5286d1d245b2-scripts\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.879748 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0203bca7-1453-4a7b-8597-5286d1d245b2-config-data-custom\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.879789 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0203bca7-1453-4a7b-8597-5286d1d245b2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.879821 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0203bca7-1453-4a7b-8597-5286d1d245b2-config-data\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.879869 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6t8f5\" (UniqueName: \"kubernetes.io/projected/0203bca7-1453-4a7b-8597-5286d1d245b2-kube-api-access-6t8f5\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.879906 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0203bca7-1453-4a7b-8597-5286d1d245b2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.879938 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0203bca7-1453-4a7b-8597-5286d1d245b2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.880001 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0203bca7-1453-4a7b-8597-5286d1d245b2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.880022 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0203bca7-1453-4a7b-8597-5286d1d245b2-logs\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.880745 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0203bca7-1453-4a7b-8597-5286d1d245b2-logs\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.883588 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0203bca7-1453-4a7b-8597-5286d1d245b2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.885811 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0203bca7-1453-4a7b-8597-5286d1d245b2-scripts\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.886036 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0203bca7-1453-4a7b-8597-5286d1d245b2-config-data\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.888368 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0203bca7-1453-4a7b-8597-5286d1d245b2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.888599 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0203bca7-1453-4a7b-8597-5286d1d245b2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.889941 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0203bca7-1453-4a7b-8597-5286d1d245b2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.892211 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/0203bca7-1453-4a7b-8597-5286d1d245b2-config-data-custom\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.947579 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6t8f5\" (UniqueName: \"kubernetes.io/projected/0203bca7-1453-4a7b-8597-5286d1d245b2-kube-api-access-6t8f5\") pod \"cinder-api-0\" (UID: \"0203bca7-1453-4a7b-8597-5286d1d245b2\") " pod="openstack/cinder-api-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.967822 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:25:57 crc kubenswrapper[4946]: I1204 15:25:57.994464 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 04 15:25:58 crc kubenswrapper[4946]: I1204 15:25:58.070621 4946 scope.go:117] "RemoveContainer" containerID="ac59a78316d9af6cbb656acb076c2685eb11be8ac467ff688659bf2f1908a8bf" Dec 04 15:25:58 crc kubenswrapper[4946]: I1204 15:25:58.117577 4946 scope.go:117] "RemoveContainer" containerID="52ed8f8114e0ab3d840038dd25397c4b6473749bc94df28426b955b2aa4b1b36" Dec 04 15:25:58 crc kubenswrapper[4946]: I1204 15:25:58.150282 4946 scope.go:117] "RemoveContainer" containerID="0127d815e1b4411476f87ac6c09fd13ed369b5ffa64e9597645717208c7168d9" Dec 04 15:25:58 crc kubenswrapper[4946]: I1204 15:25:58.185178 4946 scope.go:117] "RemoveContainer" containerID="0a2472f31a8e5c8d5dc9b0437835174fe608db8fe2f9b004645aea35a3c5ba71" Dec 04 15:25:58 crc kubenswrapper[4946]: I1204 15:25:58.514694 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:25:58 crc kubenswrapper[4946]: W1204 15:25:58.520090 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod511a9cd8_e1dc_4f14_b4df_847fda791a30.slice/crio-4c822496184cab6148427f6b131c312c64110151ab02869a05728914b12bfd3b WatchSource:0}: Error finding container 4c822496184cab6148427f6b131c312c64110151ab02869a05728914b12bfd3b: Status 404 returned error can't find the container with id 4c822496184cab6148427f6b131c312c64110151ab02869a05728914b12bfd3b Dec 04 15:25:58 crc kubenswrapper[4946]: I1204 15:25:58.583624 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 04 15:25:59 crc kubenswrapper[4946]: I1204 15:25:59.498422 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f87e8c4-e81e-41a7-afc4-7e1e856aaccd" path="/var/lib/kubelet/pods/9f87e8c4-e81e-41a7-afc4-7e1e856aaccd/volumes" Dec 04 15:25:59 crc kubenswrapper[4946]: I1204 15:25:59.505161 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7472ebe-1d3e-4e70-bb29-f86ee17843a1" path="/var/lib/kubelet/pods/c7472ebe-1d3e-4e70-bb29-f86ee17843a1/volumes" Dec 04 15:25:59 crc kubenswrapper[4946]: I1204 15:25:59.544470 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"511a9cd8-e1dc-4f14-b4df-847fda791a30","Type":"ContainerStarted","Data":"4c822496184cab6148427f6b131c312c64110151ab02869a05728914b12bfd3b"} Dec 04 15:25:59 crc kubenswrapper[4946]: I1204 15:25:59.561196 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 15:25:59 crc kubenswrapper[4946]: I1204 15:25:59.561633 4946 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/glance-default-internal-api-0" podUID="978d45f1-6556-4486-8175-29a7f68b263a" containerName="glance-httpd" containerID="cri-o://7df9632d6581dd2761d69f8860814b2ebc9925d10e95483d83136bde92417db7" gracePeriod=30 Dec 04 15:25:59 crc kubenswrapper[4946]: I1204 15:25:59.561615 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="978d45f1-6556-4486-8175-29a7f68b263a" containerName="glance-log" containerID="cri-o://934b953f53f98d777b40fbcf75dc9881b567c4b6fbcf42480848bfe6f4d35af1" gracePeriod=30 Dec 04 15:25:59 crc kubenswrapper[4946]: I1204 15:25:59.594421 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-8679d7877f-2wbn9" event={"ID":"bec308a1-7b44-4153-a863-7b9755407899","Type":"ContainerStarted","Data":"2e5feccdcefd06c49b8d16cc95278f27e30fea4707be877ec732f002bb94c378"} Dec 04 15:25:59 crc kubenswrapper[4946]: I1204 15:25:59.607183 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0203bca7-1453-4a7b-8597-5286d1d245b2","Type":"ContainerStarted","Data":"bfc0048fa7c971db8b2fa3b28c75f3f0b8d09b747cf905862a0cc041edf7c146"} Dec 04 15:26:00 crc kubenswrapper[4946]: I1204 15:26:00.551858 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:26:00 crc kubenswrapper[4946]: I1204 15:26:00.654243 4946 generic.go:334] "Generic (PLEG): container finished" podID="978d45f1-6556-4486-8175-29a7f68b263a" containerID="934b953f53f98d777b40fbcf75dc9881b567c4b6fbcf42480848bfe6f4d35af1" exitCode=143 Dec 04 15:26:00 crc kubenswrapper[4946]: I1204 15:26:00.654326 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"978d45f1-6556-4486-8175-29a7f68b263a","Type":"ContainerDied","Data":"934b953f53f98d777b40fbcf75dc9881b567c4b6fbcf42480848bfe6f4d35af1"} Dec 04 15:26:00 crc kubenswrapper[4946]: I1204 15:26:00.655796 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-8679d7877f-2wbn9" event={"ID":"bec308a1-7b44-4153-a863-7b9755407899","Type":"ContainerStarted","Data":"b62c9a5125429423adba074e4e9f4701ea0d073a2d376d0ee6d2fbb08ea82695"} Dec 04 15:26:00 crc kubenswrapper[4946]: I1204 15:26:00.658028 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:26:00 crc kubenswrapper[4946]: I1204 15:26:00.658068 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-8679d7877f-2wbn9" Dec 04 15:26:00 crc kubenswrapper[4946]: I1204 15:26:00.661932 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0203bca7-1453-4a7b-8597-5286d1d245b2","Type":"ContainerStarted","Data":"8abe979ceadeb0caa5de2a4c1021530f4cc55e7d1dafaf5f4e53c84ef1d85617"} Dec 04 15:26:00 crc kubenswrapper[4946]: I1204 15:26:00.676691 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"511a9cd8-e1dc-4f14-b4df-847fda791a30","Type":"ContainerStarted","Data":"82361094770d1133fa93fa5940746142a1853727f06da55ed9ca80cc9594a136"} Dec 04 15:26:00 crc kubenswrapper[4946]: I1204 15:26:00.690755 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-8679d7877f-2wbn9" podStartSLOduration=12.690733459 podStartE2EDuration="12.690733459s" podCreationTimestamp="2025-12-04 15:25:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:26:00.684276215 +0000 UTC m=+1411.570319856" watchObservedRunningTime="2025-12-04 15:26:00.690733459 +0000 UTC m=+1411.576777100" Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.495310 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-hckh5"] Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.497536 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-hckh5" Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.532722 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68d63a4d-c7d0-4978-915e-b538b2ed82b1-operator-scripts\") pod \"nova-api-db-create-hckh5\" (UID: \"68d63a4d-c7d0-4978-915e-b538b2ed82b1\") " pod="openstack/nova-api-db-create-hckh5" Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.532803 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hv98m\" (UniqueName: \"kubernetes.io/projected/68d63a4d-c7d0-4978-915e-b538b2ed82b1-kube-api-access-hv98m\") pod \"nova-api-db-create-hckh5\" (UID: \"68d63a4d-c7d0-4978-915e-b538b2ed82b1\") " pod="openstack/nova-api-db-create-hckh5" Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.535537 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-hckh5"] Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.629803 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-xcjh9"] Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.632281 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-xcjh9" Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.636065 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68d63a4d-c7d0-4978-915e-b538b2ed82b1-operator-scripts\") pod \"nova-api-db-create-hckh5\" (UID: \"68d63a4d-c7d0-4978-915e-b538b2ed82b1\") " pod="openstack/nova-api-db-create-hckh5" Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.636177 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hv98m\" (UniqueName: \"kubernetes.io/projected/68d63a4d-c7d0-4978-915e-b538b2ed82b1-kube-api-access-hv98m\") pod \"nova-api-db-create-hckh5\" (UID: \"68d63a4d-c7d0-4978-915e-b538b2ed82b1\") " pod="openstack/nova-api-db-create-hckh5" Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.642731 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68d63a4d-c7d0-4978-915e-b538b2ed82b1-operator-scripts\") pod \"nova-api-db-create-hckh5\" (UID: \"68d63a4d-c7d0-4978-915e-b538b2ed82b1\") " pod="openstack/nova-api-db-create-hckh5" Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.668374 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-xcjh9"] Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.701912 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-x5rz8"] Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.715838 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hv98m\" (UniqueName: \"kubernetes.io/projected/68d63a4d-c7d0-4978-915e-b538b2ed82b1-kube-api-access-hv98m\") pod \"nova-api-db-create-hckh5\" (UID: \"68d63a4d-c7d0-4978-915e-b538b2ed82b1\") " pod="openstack/nova-api-db-create-hckh5" Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.723997 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-x5rz8" Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.744053 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9860ec84-49a2-4eb0-8706-3ae5c1673add-operator-scripts\") pod \"nova-cell0-db-create-xcjh9\" (UID: \"9860ec84-49a2-4eb0-8706-3ae5c1673add\") " pod="openstack/nova-cell0-db-create-xcjh9" Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.744200 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkgmg\" (UniqueName: \"kubernetes.io/projected/9860ec84-49a2-4eb0-8706-3ae5c1673add-kube-api-access-rkgmg\") pod \"nova-cell0-db-create-xcjh9\" (UID: \"9860ec84-49a2-4eb0-8706-3ae5c1673add\") " pod="openstack/nova-cell0-db-create-xcjh9" Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.816528 4946 generic.go:334] "Generic (PLEG): container finished" podID="20d2aa22-a98c-483e-b74a-dd549ec45640" containerID="0c85f1b8b663d3558698ef8619bbbb0c2bbb53cc3a3c084c0f23d12eb3f52652" exitCode=0 Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.816678 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"20d2aa22-a98c-483e-b74a-dd549ec45640","Type":"ContainerDied","Data":"0c85f1b8b663d3558698ef8619bbbb0c2bbb53cc3a3c084c0f23d12eb3f52652"} Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.857983 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0203bca7-1453-4a7b-8597-5286d1d245b2","Type":"ContainerStarted","Data":"20bf591e13730b9c2838c9e2320d0d34eeae3f2f60eb9001f93c13c3d778a7a7"} Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.858552 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.859439 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkgmg\" (UniqueName: \"kubernetes.io/projected/9860ec84-49a2-4eb0-8706-3ae5c1673add-kube-api-access-rkgmg\") pod \"nova-cell0-db-create-xcjh9\" (UID: \"9860ec84-49a2-4eb0-8706-3ae5c1673add\") " pod="openstack/nova-cell0-db-create-xcjh9" Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.859735 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb-operator-scripts\") pod \"nova-cell1-db-create-x5rz8\" (UID: \"c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb\") " pod="openstack/nova-cell1-db-create-x5rz8" Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.859973 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9860ec84-49a2-4eb0-8706-3ae5c1673add-operator-scripts\") pod \"nova-cell0-db-create-xcjh9\" (UID: \"9860ec84-49a2-4eb0-8706-3ae5c1673add\") " pod="openstack/nova-cell0-db-create-xcjh9" Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.860332 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvjjt\" (UniqueName: \"kubernetes.io/projected/c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb-kube-api-access-kvjjt\") pod \"nova-cell1-db-create-x5rz8\" (UID: \"c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb\") " pod="openstack/nova-cell1-db-create-x5rz8" Dec 04 15:26:02 
Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.876922 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-hckh5"
Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.880846 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-x5rz8"]
Dec 04 15:26:02 crc kubenswrapper[4946]: I1204 15:26:02.968320 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkgmg\" (UniqueName: \"kubernetes.io/projected/9860ec84-49a2-4eb0-8706-3ae5c1673add-kube-api-access-rkgmg\") pod \"nova-cell0-db-create-xcjh9\" (UID: \"9860ec84-49a2-4eb0-8706-3ae5c1673add\") " pod="openstack/nova-cell0-db-create-xcjh9"
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:02.993751 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvjjt\" (UniqueName: \"kubernetes.io/projected/c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb-kube-api-access-kvjjt\") pod \"nova-cell1-db-create-x5rz8\" (UID: \"c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb\") " pod="openstack/nova-cell1-db-create-x5rz8"
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:02.996478 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb-operator-scripts\") pod \"nova-cell1-db-create-x5rz8\" (UID: \"c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb\") " pod="openstack/nova-cell1-db-create-x5rz8"
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:02.999107 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb-operator-scripts\") pod \"nova-cell1-db-create-x5rz8\" (UID: \"c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb\") " pod="openstack/nova-cell1-db-create-x5rz8"
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.004205 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-xcjh9"
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.054371 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvjjt\" (UniqueName: \"kubernetes.io/projected/c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb-kube-api-access-kvjjt\") pod \"nova-cell1-db-create-x5rz8\" (UID: \"c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb\") " pod="openstack/nova-cell1-db-create-x5rz8"
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.054889 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-c9a7-account-create-update-ppsl7"]
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.056731 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c9a7-account-create-update-ppsl7"
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.087176 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.120979 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-c9a7-account-create-update-ppsl7"]
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.202413 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtq68\" (UniqueName: \"kubernetes.io/projected/4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6-kube-api-access-wtq68\") pod \"nova-api-c9a7-account-create-update-ppsl7\" (UID: \"4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6\") " pod="openstack/nova-api-c9a7-account-create-update-ppsl7"
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.202533 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6-operator-scripts\") pod \"nova-api-c9a7-account-create-update-ppsl7\" (UID: \"4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6\") " pod="openstack/nova-api-c9a7-account-create-update-ppsl7"
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.233962 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-345d-account-create-update-zlp2s"]
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.235735 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-345d-account-create-update-zlp2s"
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.239303 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.259141 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-x5rz8"
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.293560 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-345d-account-create-update-zlp2s"]
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.305350 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtq68\" (UniqueName: \"kubernetes.io/projected/4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6-kube-api-access-wtq68\") pod \"nova-api-c9a7-account-create-update-ppsl7\" (UID: \"4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6\") " pod="openstack/nova-api-c9a7-account-create-update-ppsl7"
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.305587 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6-operator-scripts\") pod \"nova-api-c9a7-account-create-update-ppsl7\" (UID: \"4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6\") " pod="openstack/nova-api-c9a7-account-create-update-ppsl7"
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.306898 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6-operator-scripts\") pod \"nova-api-c9a7-account-create-update-ppsl7\" (UID: \"4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6\") " pod="openstack/nova-api-c9a7-account-create-update-ppsl7"
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.325819 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.32578611 podStartE2EDuration="6.32578611s" podCreationTimestamp="2025-12-04 15:25:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:26:02.943377988 +0000 UTC m=+1413.829421649" watchObservedRunningTime="2025-12-04 15:26:03.32578611 +0000 UTC m=+1414.211829751"
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.330285 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtq68\" (UniqueName: \"kubernetes.io/projected/4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6-kube-api-access-wtq68\") pod \"nova-api-c9a7-account-create-update-ppsl7\" (UID: \"4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6\") " pod="openstack/nova-api-c9a7-account-create-update-ppsl7"
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.368844 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-2d75-account-create-update-dr4ts"]
Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.371003 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-2d75-account-create-update-dr4ts"
Need to start a new one" pod="openstack/nova-cell1-2d75-account-create-update-dr4ts" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.375049 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.382443 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-2d75-account-create-update-dr4ts"] Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.419671 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxslm\" (UniqueName: \"kubernetes.io/projected/86953713-6e7a-4cf7-8817-45785be4930f-kube-api-access-rxslm\") pod \"nova-cell1-2d75-account-create-update-dr4ts\" (UID: \"86953713-6e7a-4cf7-8817-45785be4930f\") " pod="openstack/nova-cell1-2d75-account-create-update-dr4ts" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.419743 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2ec6f3e-22c6-496a-b23b-e0d493032eb6-operator-scripts\") pod \"nova-cell0-345d-account-create-update-zlp2s\" (UID: \"a2ec6f3e-22c6-496a-b23b-e0d493032eb6\") " pod="openstack/nova-cell0-345d-account-create-update-zlp2s" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.419903 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldfpr\" (UniqueName: \"kubernetes.io/projected/a2ec6f3e-22c6-496a-b23b-e0d493032eb6-kube-api-access-ldfpr\") pod \"nova-cell0-345d-account-create-update-zlp2s\" (UID: \"a2ec6f3e-22c6-496a-b23b-e0d493032eb6\") " pod="openstack/nova-cell0-345d-account-create-update-zlp2s" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.419964 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86953713-6e7a-4cf7-8817-45785be4930f-operator-scripts\") pod \"nova-cell1-2d75-account-create-update-dr4ts\" (UID: \"86953713-6e7a-4cf7-8817-45785be4930f\") " pod="openstack/nova-cell1-2d75-account-create-update-dr4ts" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.468838 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-c9a7-account-create-update-ppsl7" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.525093 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldfpr\" (UniqueName: \"kubernetes.io/projected/a2ec6f3e-22c6-496a-b23b-e0d493032eb6-kube-api-access-ldfpr\") pod \"nova-cell0-345d-account-create-update-zlp2s\" (UID: \"a2ec6f3e-22c6-496a-b23b-e0d493032eb6\") " pod="openstack/nova-cell0-345d-account-create-update-zlp2s" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.525175 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86953713-6e7a-4cf7-8817-45785be4930f-operator-scripts\") pod \"nova-cell1-2d75-account-create-update-dr4ts\" (UID: \"86953713-6e7a-4cf7-8817-45785be4930f\") " pod="openstack/nova-cell1-2d75-account-create-update-dr4ts" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.525273 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxslm\" (UniqueName: \"kubernetes.io/projected/86953713-6e7a-4cf7-8817-45785be4930f-kube-api-access-rxslm\") pod \"nova-cell1-2d75-account-create-update-dr4ts\" (UID: \"86953713-6e7a-4cf7-8817-45785be4930f\") " pod="openstack/nova-cell1-2d75-account-create-update-dr4ts" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.525298 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2ec6f3e-22c6-496a-b23b-e0d493032eb6-operator-scripts\") pod \"nova-cell0-345d-account-create-update-zlp2s\" (UID: \"a2ec6f3e-22c6-496a-b23b-e0d493032eb6\") " pod="openstack/nova-cell0-345d-account-create-update-zlp2s" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.528403 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86953713-6e7a-4cf7-8817-45785be4930f-operator-scripts\") pod \"nova-cell1-2d75-account-create-update-dr4ts\" (UID: \"86953713-6e7a-4cf7-8817-45785be4930f\") " pod="openstack/nova-cell1-2d75-account-create-update-dr4ts" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.529058 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2ec6f3e-22c6-496a-b23b-e0d493032eb6-operator-scripts\") pod \"nova-cell0-345d-account-create-update-zlp2s\" (UID: \"a2ec6f3e-22c6-496a-b23b-e0d493032eb6\") " pod="openstack/nova-cell0-345d-account-create-update-zlp2s" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.562544 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldfpr\" (UniqueName: \"kubernetes.io/projected/a2ec6f3e-22c6-496a-b23b-e0d493032eb6-kube-api-access-ldfpr\") pod \"nova-cell0-345d-account-create-update-zlp2s\" (UID: \"a2ec6f3e-22c6-496a-b23b-e0d493032eb6\") " pod="openstack/nova-cell0-345d-account-create-update-zlp2s" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.562562 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxslm\" (UniqueName: \"kubernetes.io/projected/86953713-6e7a-4cf7-8817-45785be4930f-kube-api-access-rxslm\") pod \"nova-cell1-2d75-account-create-update-dr4ts\" (UID: \"86953713-6e7a-4cf7-8817-45785be4930f\") " pod="openstack/nova-cell1-2d75-account-create-update-dr4ts" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.565838 4946 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-345d-account-create-update-zlp2s" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.673057 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-2d75-account-create-update-dr4ts" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.712436 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.831967 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-config-data\") pod \"20d2aa22-a98c-483e-b74a-dd549ec45640\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.832556 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-public-tls-certs\") pod \"20d2aa22-a98c-483e-b74a-dd549ec45640\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.832605 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4lb9\" (UniqueName: \"kubernetes.io/projected/20d2aa22-a98c-483e-b74a-dd549ec45640-kube-api-access-h4lb9\") pod \"20d2aa22-a98c-483e-b74a-dd549ec45640\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.843956 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-hckh5"] Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.845344 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20d2aa22-a98c-483e-b74a-dd549ec45640-kube-api-access-h4lb9" (OuterVolumeSpecName: "kube-api-access-h4lb9") pod "20d2aa22-a98c-483e-b74a-dd549ec45640" (UID: "20d2aa22-a98c-483e-b74a-dd549ec45640"). InnerVolumeSpecName "kube-api-access-h4lb9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.845524 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a\") pod \"20d2aa22-a98c-483e-b74a-dd549ec45640\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.845626 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-combined-ca-bundle\") pod \"20d2aa22-a98c-483e-b74a-dd549ec45640\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.845714 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20d2aa22-a98c-483e-b74a-dd549ec45640-logs\") pod \"20d2aa22-a98c-483e-b74a-dd549ec45640\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.845766 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-scripts\") pod \"20d2aa22-a98c-483e-b74a-dd549ec45640\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.845793 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20d2aa22-a98c-483e-b74a-dd549ec45640-httpd-run\") pod \"20d2aa22-a98c-483e-b74a-dd549ec45640\" (UID: \"20d2aa22-a98c-483e-b74a-dd549ec45640\") " Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.846376 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4lb9\" (UniqueName: \"kubernetes.io/projected/20d2aa22-a98c-483e-b74a-dd549ec45640-kube-api-access-h4lb9\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.851756 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20d2aa22-a98c-483e-b74a-dd549ec45640-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "20d2aa22-a98c-483e-b74a-dd549ec45640" (UID: "20d2aa22-a98c-483e-b74a-dd549ec45640"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.855596 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20d2aa22-a98c-483e-b74a-dd549ec45640-logs" (OuterVolumeSpecName: "logs") pod "20d2aa22-a98c-483e-b74a-dd549ec45640" (UID: "20d2aa22-a98c-483e-b74a-dd549ec45640"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.868336 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-scripts" (OuterVolumeSpecName: "scripts") pod "20d2aa22-a98c-483e-b74a-dd549ec45640" (UID: "20d2aa22-a98c-483e-b74a-dd549ec45640"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.899889 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a" (OuterVolumeSpecName: "glance") pod "20d2aa22-a98c-483e-b74a-dd549ec45640" (UID: "20d2aa22-a98c-483e-b74a-dd549ec45640"). InnerVolumeSpecName "pvc-c2443261-ac2f-492a-9ba5-293baae6928a". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.924780 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "20d2aa22-a98c-483e-b74a-dd549ec45640" (UID: "20d2aa22-a98c-483e-b74a-dd549ec45640"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.949485 4946 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20d2aa22-a98c-483e-b74a-dd549ec45640-logs\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.949514 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.949524 4946 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20d2aa22-a98c-483e-b74a-dd549ec45640-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.949562 4946 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-c2443261-ac2f-492a-9ba5-293baae6928a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a\") on node \"crc\" " Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.949575 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.961154 4946 generic.go:334] "Generic (PLEG): container finished" podID="978d45f1-6556-4486-8175-29a7f68b263a" containerID="7df9632d6581dd2761d69f8860814b2ebc9925d10e95483d83136bde92417db7" exitCode=0 Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.961451 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"978d45f1-6556-4486-8175-29a7f68b263a","Type":"ContainerDied","Data":"7df9632d6581dd2761d69f8860814b2ebc9925d10e95483d83136bde92417db7"} Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.963930 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-hckh5" event={"ID":"68d63a4d-c7d0-4978-915e-b538b2ed82b1","Type":"ContainerStarted","Data":"586b2633cc5df84fca428a5f4cfe89f4fb4aa8f757bc8134e66f2dfebcc15e8f"} Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.987879 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"20d2aa22-a98c-483e-b74a-dd549ec45640","Type":"ContainerDied","Data":"903b1db54866acba13c6cc308b151db03c43fb9a1094240ea2ade31543761de8"} Dec 04 15:26:03 crc kubenswrapper[4946]: 
I1204 15:26:03.987950 4946 scope.go:117] "RemoveContainer" containerID="0c85f1b8b663d3558698ef8619bbbb0c2bbb53cc3a3c084c0f23d12eb3f52652" Dec 04 15:26:03 crc kubenswrapper[4946]: I1204 15:26:03.988197 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.018239 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"511a9cd8-e1dc-4f14-b4df-847fda791a30","Type":"ContainerStarted","Data":"ae2dcbec5f6c9d27cf104b3aca7dd1babf779aa9ed8e43c512eba34160c84f7c"} Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.019627 4946 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.019782 4946 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-c2443261-ac2f-492a-9ba5-293baae6928a" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a") on node "crc" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.021888 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-xcjh9"] Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.026305 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-config-data" (OuterVolumeSpecName: "config-data") pod "20d2aa22-a98c-483e-b74a-dd549ec45640" (UID: "20d2aa22-a98c-483e-b74a-dd549ec45640"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.051977 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.052027 4946 reconciler_common.go:293] "Volume detached for volume \"pvc-c2443261-ac2f-492a-9ba5-293baae6928a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.056884 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "20d2aa22-a98c-483e-b74a-dd549ec45640" (UID: "20d2aa22-a98c-483e-b74a-dd549ec45640"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.158952 4946 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20d2aa22-a98c-483e-b74a-dd549ec45640-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.171848 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-x5rz8"] Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.351268 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.366306 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.401092 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 15:26:04 crc kubenswrapper[4946]: E1204 15:26:04.401648 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20d2aa22-a98c-483e-b74a-dd549ec45640" containerName="glance-log" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.401660 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="20d2aa22-a98c-483e-b74a-dd549ec45640" containerName="glance-log" Dec 04 15:26:04 crc kubenswrapper[4946]: E1204 15:26:04.401690 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20d2aa22-a98c-483e-b74a-dd549ec45640" containerName="glance-httpd" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.401696 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="20d2aa22-a98c-483e-b74a-dd549ec45640" containerName="glance-httpd" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.403513 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="20d2aa22-a98c-483e-b74a-dd549ec45640" containerName="glance-httpd" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.403542 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="20d2aa22-a98c-483e-b74a-dd549ec45640" containerName="glance-log" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.448770 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.448941 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.453108 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.453490 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.483693 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-c9a7-account-create-update-ppsl7"] Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.513926 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-2d75-account-create-update-dr4ts"] Dec 04 15:26:04 crc kubenswrapper[4946]: E1204 15:26:04.517618 4946 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod20d2aa22_a98c_483e_b74a_dd549ec45640.slice/crio-903b1db54866acba13c6cc308b151db03c43fb9a1094240ea2ade31543761de8\": RecentStats: unable to find data in memory cache]" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.575362 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57cbb428-8955-4aa2-9025-cfdd74592074-scripts\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.580463 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c2443261-ac2f-492a-9ba5-293baae6928a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.580684 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57cbb428-8955-4aa2-9025-cfdd74592074-logs\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.580822 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5w8p2\" (UniqueName: \"kubernetes.io/projected/57cbb428-8955-4aa2-9025-cfdd74592074-kube-api-access-5w8p2\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.581037 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57cbb428-8955-4aa2-9025-cfdd74592074-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.581313 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cbb428-8955-4aa2-9025-cfdd74592074-public-tls-certs\") pod 
\"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.583497 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57cbb428-8955-4aa2-9025-cfdd74592074-config-data\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.583543 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/57cbb428-8955-4aa2-9025-cfdd74592074-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.646383 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-345d-account-create-update-zlp2s"] Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.685894 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57cbb428-8955-4aa2-9025-cfdd74592074-config-data\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.685943 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/57cbb428-8955-4aa2-9025-cfdd74592074-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.685975 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57cbb428-8955-4aa2-9025-cfdd74592074-scripts\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.686016 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c2443261-ac2f-492a-9ba5-293baae6928a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.686050 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57cbb428-8955-4aa2-9025-cfdd74592074-logs\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.686076 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5w8p2\" (UniqueName: \"kubernetes.io/projected/57cbb428-8955-4aa2-9025-cfdd74592074-kube-api-access-5w8p2\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.686130 4946 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57cbb428-8955-4aa2-9025-cfdd74592074-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.686170 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cbb428-8955-4aa2-9025-cfdd74592074-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.690162 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57cbb428-8955-4aa2-9025-cfdd74592074-logs\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.691635 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/57cbb428-8955-4aa2-9025-cfdd74592074-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.693107 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57cbb428-8955-4aa2-9025-cfdd74592074-config-data\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0" Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.693692 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.693720 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c2443261-ac2f-492a-9ba5-293baae6928a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f3260bbd2cdb28f5793a9d1edc63c254c747b9e66cce69dc3f280fc78b1b134b/globalmount\"" pod="openstack/glance-default-external-api-0"
Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.700709 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cbb428-8955-4aa2-9025-cfdd74592074-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0"
Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.703239 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57cbb428-8955-4aa2-9025-cfdd74592074-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0"
Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.705777 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57cbb428-8955-4aa2-9025-cfdd74592074-scripts\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0"
Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.707476 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5w8p2\" (UniqueName: \"kubernetes.io/projected/57cbb428-8955-4aa2-9025-cfdd74592074-kube-api-access-5w8p2\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0"
Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.757602 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c2443261-ac2f-492a-9ba5-293baae6928a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c2443261-ac2f-492a-9ba5-293baae6928a\") pod \"glance-default-external-api-0\" (UID: \"57cbb428-8955-4aa2-9025-cfdd74592074\") " pod="openstack/glance-default-external-api-0"
Dec 04 15:26:04 crc kubenswrapper[4946]: I1204 15:26:04.783474 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 04 15:26:05 crc kubenswrapper[4946]: I1204 15:26:05.029101 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-xcjh9" event={"ID":"9860ec84-49a2-4eb0-8706-3ae5c1673add","Type":"ContainerStarted","Data":"0ae26c23740ebd78875502d605b120abca8b0d355dd9a4453af4efd0d3457f81"}
Dec 04 15:26:05 crc kubenswrapper[4946]: I1204 15:26:05.468267 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20d2aa22-a98c-483e-b74a-dd549ec45640" path="/var/lib/kubelet/pods/20d2aa22-a98c-483e-b74a-dd549ec45640/volumes"
Dec 04 15:26:05 crc kubenswrapper[4946]: I1204 15:26:05.482944 4946 scope.go:117] "RemoveContainer" containerID="2012190c874c38dc1fcf608c073022d9d04b51c5f1049415ab6b9593e4e9b297"
Dec 04 15:26:06 crc kubenswrapper[4946]: I1204 15:26:06.103012 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-x5rz8" event={"ID":"c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb","Type":"ContainerStarted","Data":"6b20e80bc808c784408f95cce4c4d9825761f176bc6dc67f84e755c35cee655e"}
Dec 04 15:26:06 crc kubenswrapper[4946]: I1204 15:26:06.117901 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2d75-account-create-update-dr4ts" event={"ID":"86953713-6e7a-4cf7-8817-45785be4930f","Type":"ContainerStarted","Data":"1ba01ba44323fa29ca778cf947ea517c9a8d44632691f8f470075e3bb40cf68f"}
Dec 04 15:26:06 crc kubenswrapper[4946]: I1204 15:26:06.162634 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-hckh5" event={"ID":"68d63a4d-c7d0-4978-915e-b538b2ed82b1","Type":"ContainerStarted","Data":"920741b2938a779a2e6a267850ce5c3c9c8d3bf182f5b80c6b51fe1867dde70c"}
Dec 04 15:26:06 crc kubenswrapper[4946]: I1204 15:26:06.187150 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c9a7-account-create-update-ppsl7" event={"ID":"4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6","Type":"ContainerStarted","Data":"f079acb6622139e6fac34f56ff087c67cb9bf727fcc70117132f36a90f47ee54"}
Dec 04 15:26:06 crc kubenswrapper[4946]: I1204 15:26:06.205495 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-345d-account-create-update-zlp2s" event={"ID":"a2ec6f3e-22c6-496a-b23b-e0d493032eb6","Type":"ContainerStarted","Data":"9edbe525f3a04982d6f8ad054541f7eccdf8b98e4ff3549c586136128705bfcf"}
Dec 04 15:26:06 crc kubenswrapper[4946]: I1204 15:26:06.222980 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-hckh5" podStartSLOduration=4.222952941 podStartE2EDuration="4.222952941s" podCreationTimestamp="2025-12-04 15:26:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:26:06.195973794 +0000 UTC m=+1417.082017435" watchObservedRunningTime="2025-12-04 15:26:06.222952941 +0000 UTC m=+1417.108996582"
Dec 04 15:26:06 crc kubenswrapper[4946]: W1204 15:26:06.504082 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57cbb428_8955_4aa2_9025_cfdd74592074.slice/crio-bcac286879284c3034b23c96e7c42ad4add114c6857a3b61e135dfbd9c869c12 WatchSource:0}: Error finding container bcac286879284c3034b23c96e7c42ad4add114c6857a3b61e135dfbd9c869c12: Status 404 returned error can't find the container with id bcac286879284c3034b23c96e7c42ad4add114c6857a3b61e135dfbd9c869c12
Dec 04 15:26:06 crc kubenswrapper[4946]: I1204 15:26:06.505810 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 04 15:26:06 crc kubenswrapper[4946]: I1204 15:26:06.936444 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.097891 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6255p\" (UniqueName: \"kubernetes.io/projected/978d45f1-6556-4486-8175-29a7f68b263a-kube-api-access-6255p\") pod \"978d45f1-6556-4486-8175-29a7f68b263a\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") "
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.097954 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-scripts\") pod \"978d45f1-6556-4486-8175-29a7f68b263a\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") "
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.097987 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-internal-tls-certs\") pod \"978d45f1-6556-4486-8175-29a7f68b263a\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") "
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.098131 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\") pod \"978d45f1-6556-4486-8175-29a7f68b263a\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") "
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.098223 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/978d45f1-6556-4486-8175-29a7f68b263a-logs\") pod \"978d45f1-6556-4486-8175-29a7f68b263a\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") "
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.098276 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/978d45f1-6556-4486-8175-29a7f68b263a-httpd-run\") pod \"978d45f1-6556-4486-8175-29a7f68b263a\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") "
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.098323 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-config-data\") pod \"978d45f1-6556-4486-8175-29a7f68b263a\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") "
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.098419 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-combined-ca-bundle\") pod \"978d45f1-6556-4486-8175-29a7f68b263a\" (UID: \"978d45f1-6556-4486-8175-29a7f68b263a\") "
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.099656 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/978d45f1-6556-4486-8175-29a7f68b263a-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "978d45f1-6556-4486-8175-29a7f68b263a" (UID: "978d45f1-6556-4486-8175-29a7f68b263a"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.099979 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/978d45f1-6556-4486-8175-29a7f68b263a-logs" (OuterVolumeSpecName: "logs") pod "978d45f1-6556-4486-8175-29a7f68b263a" (UID: "978d45f1-6556-4486-8175-29a7f68b263a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.100617 4946 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/978d45f1-6556-4486-8175-29a7f68b263a-logs\") on node \"crc\" DevicePath \"\""
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.100646 4946 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/978d45f1-6556-4486-8175-29a7f68b263a-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.112004 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-scripts" (OuterVolumeSpecName: "scripts") pod "978d45f1-6556-4486-8175-29a7f68b263a" (UID: "978d45f1-6556-4486-8175-29a7f68b263a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.123333 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/978d45f1-6556-4486-8175-29a7f68b263a-kube-api-access-6255p" (OuterVolumeSpecName: "kube-api-access-6255p") pod "978d45f1-6556-4486-8175-29a7f68b263a" (UID: "978d45f1-6556-4486-8175-29a7f68b263a"). InnerVolumeSpecName "kube-api-access-6255p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.161569 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "978d45f1-6556-4486-8175-29a7f68b263a" (UID: "978d45f1-6556-4486-8175-29a7f68b263a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.168609 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe" (OuterVolumeSpecName: "glance") pod "978d45f1-6556-4486-8175-29a7f68b263a" (UID: "978d45f1-6556-4486-8175-29a7f68b263a"). InnerVolumeSpecName "pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.202258 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.202288 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6255p\" (UniqueName: \"kubernetes.io/projected/978d45f1-6556-4486-8175-29a7f68b263a-kube-api-access-6255p\") on node \"crc\" DevicePath \"\""
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.202299 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-scripts\") on node \"crc\" DevicePath \"\""
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.202330 4946 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\") on node \"crc\" "
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.212814 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "978d45f1-6556-4486-8175-29a7f68b263a" (UID: "978d45f1-6556-4486-8175-29a7f68b263a"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.243450 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-config-data" (OuterVolumeSpecName: "config-data") pod "978d45f1-6556-4486-8175-29a7f68b263a" (UID: "978d45f1-6556-4486-8175-29a7f68b263a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.250110 4946 generic.go:334] "Generic (PLEG): container finished" podID="9860ec84-49a2-4eb0-8706-3ae5c1673add" containerID="e46b51b74646d5bd52724cfabafdf8c2d8b25c312f724c407bcee797039f3cef" exitCode=0
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.250293 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-xcjh9" event={"ID":"9860ec84-49a2-4eb0-8706-3ae5c1673add","Type":"ContainerDied","Data":"e46b51b74646d5bd52724cfabafdf8c2d8b25c312f724c407bcee797039f3cef"}
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.280998 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"511a9cd8-e1dc-4f14-b4df-847fda791a30","Type":"ContainerStarted","Data":"a5bb6013d244c1acc4273f94ed331aff51570fa19cd7406ad2f1730877eb6b53"}
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.292043 4946 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.292349 4946 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe") on node "crc"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.304782 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"978d45f1-6556-4486-8175-29a7f68b263a","Type":"ContainerDied","Data":"53f99e0589e14fadfeec81bad52c91ad056af2f06d8cd4649d377be02c6dda10"}
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.304878 4946 scope.go:117] "RemoveContainer" containerID="7df9632d6581dd2761d69f8860814b2ebc9925d10e95483d83136bde92417db7"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.305172 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.315381 4946 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.320632 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2d75-account-create-update-dr4ts" event={"ID":"86953713-6e7a-4cf7-8817-45785be4930f","Type":"ContainerStarted","Data":"a40838ee8e3ee2ded2de941fa076df2821b8ae4ed75c782d9d6730c4733932cc"}
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.321778 4946 reconciler_common.go:293] "Volume detached for volume \"pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\") on node \"crc\" DevicePath \"\""
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.323182 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/978d45f1-6556-4486-8175-29a7f68b263a-config-data\") on node \"crc\" DevicePath \"\""
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.331518 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"57cbb428-8955-4aa2-9025-cfdd74592074","Type":"ContainerStarted","Data":"bcac286879284c3034b23c96e7c42ad4add114c6857a3b61e135dfbd9c869c12"}
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.357537 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-2d75-account-create-update-dr4ts" podStartSLOduration=4.357512093 podStartE2EDuration="4.357512093s" podCreationTimestamp="2025-12-04 15:26:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:26:07.34884042 +0000 UTC m=+1418.234884061" watchObservedRunningTime="2025-12-04 15:26:07.357512093 +0000 UTC m=+1418.243555734"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.360939 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c9a7-account-create-update-ppsl7" event={"ID":"4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6","Type":"ContainerStarted","Data":"c466309c8472522b4385b9b2a542c9aa161d57b77dfb8bd624ed1560295a6d7f"}
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.371542 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-345d-account-create-update-zlp2s" event={"ID":"a2ec6f3e-22c6-496a-b23b-e0d493032eb6","Type":"ContainerStarted","Data":"2cf164bcb1ae2e32ca2e90cd989a94547ae913e877927ac9aec86b316a27a8a4"}
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.394496 4946 generic.go:334] "Generic (PLEG): container finished" podID="c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb" containerID="267ffbe080c11d25c74b3bf9a7e1e5a4006d14e666ed12a6eac29d148f294126" exitCode=0
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.395072 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-x5rz8" event={"ID":"c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb","Type":"ContainerDied","Data":"267ffbe080c11d25c74b3bf9a7e1e5a4006d14e666ed12a6eac29d148f294126"}
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.395595 4946 scope.go:117] "RemoveContainer" containerID="934b953f53f98d777b40fbcf75dc9881b567c4b6fbcf42480848bfe6f4d35af1"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.398547 4946 generic.go:334] "Generic (PLEG): container finished" podID="68d63a4d-c7d0-4978-915e-b538b2ed82b1" containerID="920741b2938a779a2e6a267850ce5c3c9c8d3bf182f5b80c6b51fe1867dde70c" exitCode=0
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.398721 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-hckh5" event={"ID":"68d63a4d-c7d0-4978-915e-b538b2ed82b1","Type":"ContainerDied","Data":"920741b2938a779a2e6a267850ce5c3c9c8d3bf182f5b80c6b51fe1867dde70c"}
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.407819 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.430300 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.446982 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-c9a7-account-create-update-ppsl7" podStartSLOduration=5.446951572 podStartE2EDuration="5.446951572s" podCreationTimestamp="2025-12-04 15:26:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:26:07.408750533 +0000 UTC m=+1418.294794174" watchObservedRunningTime="2025-12-04 15:26:07.446951572 +0000 UTC m=+1418.332995213"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.477412 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="978d45f1-6556-4486-8175-29a7f68b263a" path="/var/lib/kubelet/pods/978d45f1-6556-4486-8175-29a7f68b263a/volumes"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.478081 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 04 15:26:07 crc kubenswrapper[4946]: E1204 15:26:07.478572 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="978d45f1-6556-4486-8175-29a7f68b263a" containerName="glance-log"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.478587 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="978d45f1-6556-4486-8175-29a7f68b263a" containerName="glance-log"
Dec 04 15:26:07 crc kubenswrapper[4946]: E1204 15:26:07.478627 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="978d45f1-6556-4486-8175-29a7f68b263a" containerName="glance-httpd"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.478633 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="978d45f1-6556-4486-8175-29a7f68b263a" containerName="glance-httpd"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.478866 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="978d45f1-6556-4486-8175-29a7f68b263a" containerName="glance-log"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.478886 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="978d45f1-6556-4486-8175-29a7f68b263a" containerName="glance-httpd"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.480444 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.485154 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.485441 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.500838 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.504886 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-345d-account-create-update-zlp2s" podStartSLOduration=5.504852572 podStartE2EDuration="5.504852572s" podCreationTimestamp="2025-12-04 15:26:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:26:07.441843935 +0000 UTC m=+1418.327887576" watchObservedRunningTime="2025-12-04 15:26:07.504852572 +0000 UTC m=+1418.390896203"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.637387 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b2521f9-40c6-4e13-a510-68d5dc34b313-logs\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.637436 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b2521f9-40c6-4e13-a510-68d5dc34b313-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.637467 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnd42\" (UniqueName: \"kubernetes.io/projected/0b2521f9-40c6-4e13-a510-68d5dc34b313-kube-api-access-pnd42\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.637875 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.638004 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b2521f9-40c6-4e13-a510-68d5dc34b313-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.638257 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b2521f9-40c6-4e13-a510-68d5dc34b313-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.638413 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b2521f9-40c6-4e13-a510-68d5dc34b313-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.638742 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0b2521f9-40c6-4e13-a510-68d5dc34b313-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.740922 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0b2521f9-40c6-4e13-a510-68d5dc34b313-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.740999 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b2521f9-40c6-4e13-a510-68d5dc34b313-logs\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.741019 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b2521f9-40c6-4e13-a510-68d5dc34b313-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.741041 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnd42\" (UniqueName: \"kubernetes.io/projected/0b2521f9-40c6-4e13-a510-68d5dc34b313-kube-api-access-pnd42\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.741100 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.741141 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b2521f9-40c6-4e13-a510-68d5dc34b313-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.741180 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b2521f9-40c6-4e13-a510-68d5dc34b313-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.741207 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b2521f9-40c6-4e13-a510-68d5dc34b313-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.742836 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b2521f9-40c6-4e13-a510-68d5dc34b313-logs\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.743076 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0b2521f9-40c6-4e13-a510-68d5dc34b313-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.747915 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b2521f9-40c6-4e13-a510-68d5dc34b313-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.750132 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b2521f9-40c6-4e13-a510-68d5dc34b313-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.751172 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.751234 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d80d3a2b37ec7093e1c47ec6e0b9eb3b02741300f62422d1aa8919218995c41a/globalmount\"" pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.751440 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b2521f9-40c6-4e13-a510-68d5dc34b313-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.753870 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b2521f9-40c6-4e13-a510-68d5dc34b313-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.767826 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnd42\" (UniqueName: \"kubernetes.io/projected/0b2521f9-40c6-4e13-a510-68d5dc34b313-kube-api-access-pnd42\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.818011 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7590d444-2b2e-480e-9f9f-1850b32c12fe\") pod \"glance-default-internal-api-0\" (UID: \"0b2521f9-40c6-4e13-a510-68d5dc34b313\") " pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:07 crc kubenswrapper[4946]: I1204 15:26:07.888753 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 04 15:26:08 crc kubenswrapper[4946]: I1204 15:26:08.422073 4946 generic.go:334] "Generic (PLEG): container finished" podID="86953713-6e7a-4cf7-8817-45785be4930f" containerID="a40838ee8e3ee2ded2de941fa076df2821b8ae4ed75c782d9d6730c4733932cc" exitCode=0
Dec 04 15:26:08 crc kubenswrapper[4946]: I1204 15:26:08.422154 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2d75-account-create-update-dr4ts" event={"ID":"86953713-6e7a-4cf7-8817-45785be4930f","Type":"ContainerDied","Data":"a40838ee8e3ee2ded2de941fa076df2821b8ae4ed75c782d9d6730c4733932cc"}
Dec 04 15:26:08 crc kubenswrapper[4946]: I1204 15:26:08.423983 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"57cbb428-8955-4aa2-9025-cfdd74592074","Type":"ContainerStarted","Data":"707a7402492f972e60631d3adac35f5520c9f754949646030effb030b1a58b68"}
Dec 04 15:26:08 crc kubenswrapper[4946]: I1204 15:26:08.425843 4946 generic.go:334] "Generic (PLEG): container finished" podID="4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6" containerID="c466309c8472522b4385b9b2a542c9aa161d57b77dfb8bd624ed1560295a6d7f" exitCode=0
Dec 04 15:26:08 crc kubenswrapper[4946]: I1204 15:26:08.425902 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c9a7-account-create-update-ppsl7" event={"ID":"4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6","Type":"ContainerDied","Data":"c466309c8472522b4385b9b2a542c9aa161d57b77dfb8bd624ed1560295a6d7f"}
Dec 04 15:26:08 crc kubenswrapper[4946]: I1204 15:26:08.427410 4946 generic.go:334] "Generic (PLEG): container finished" podID="a2ec6f3e-22c6-496a-b23b-e0d493032eb6" containerID="2cf164bcb1ae2e32ca2e90cd989a94547ae913e877927ac9aec86b316a27a8a4" exitCode=0
Dec 04 15:26:08 crc kubenswrapper[4946]: I1204 15:26:08.427613 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-345d-account-create-update-zlp2s" event={"ID":"a2ec6f3e-22c6-496a-b23b-e0d493032eb6","Type":"ContainerDied","Data":"2cf164bcb1ae2e32ca2e90cd989a94547ae913e877927ac9aec86b316a27a8a4"}
Dec 04 15:26:08 crc kubenswrapper[4946]: I1204 15:26:08.753848 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 04 15:26:08 crc kubenswrapper[4946]: W1204 15:26:08.780387 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b2521f9_40c6_4e13_a510_68d5dc34b313.slice/crio-e29f2ea92b5fad8601a78d97e6c2f63dd249a3bcf1a571d41df59feb3e4a228b WatchSource:0}: Error finding container e29f2ea92b5fad8601a78d97e6c2f63dd249a3bcf1a571d41df59feb3e4a228b: Status 404 returned error can't find the container with id e29f2ea92b5fad8601a78d97e6c2f63dd249a3bcf1a571d41df59feb3e4a228b
Dec 04 15:26:08 crc kubenswrapper[4946]: I1204 15:26:08.842507 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-8679d7877f-2wbn9"
Dec 04 15:26:08 crc kubenswrapper[4946]: I1204 15:26:08.845698 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-8679d7877f-2wbn9"
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.229179 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-x5rz8"
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.301769 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-hckh5"
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.391894 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68d63a4d-c7d0-4978-915e-b538b2ed82b1-operator-scripts\") pod \"68d63a4d-c7d0-4978-915e-b538b2ed82b1\" (UID: \"68d63a4d-c7d0-4978-915e-b538b2ed82b1\") "
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.392292 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hv98m\" (UniqueName: \"kubernetes.io/projected/68d63a4d-c7d0-4978-915e-b538b2ed82b1-kube-api-access-hv98m\") pod \"68d63a4d-c7d0-4978-915e-b538b2ed82b1\" (UID: \"68d63a4d-c7d0-4978-915e-b538b2ed82b1\") "
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.392496 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb-operator-scripts\") pod \"c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb\" (UID: \"c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb\") "
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.392556 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvjjt\" (UniqueName: \"kubernetes.io/projected/c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb-kube-api-access-kvjjt\") pod \"c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb\" (UID: \"c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb\") "
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.393818 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68d63a4d-c7d0-4978-915e-b538b2ed82b1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "68d63a4d-c7d0-4978-915e-b538b2ed82b1" (UID: "68d63a4d-c7d0-4978-915e-b538b2ed82b1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.393841 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb" (UID: "c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.405303 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb-kube-api-access-kvjjt" (OuterVolumeSpecName: "kube-api-access-kvjjt") pod "c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb" (UID: "c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb"). InnerVolumeSpecName "kube-api-access-kvjjt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.405426 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68d63a4d-c7d0-4978-915e-b538b2ed82b1-kube-api-access-hv98m" (OuterVolumeSpecName: "kube-api-access-hv98m") pod "68d63a4d-c7d0-4978-915e-b538b2ed82b1" (UID: "68d63a4d-c7d0-4978-915e-b538b2ed82b1"). InnerVolumeSpecName "kube-api-access-hv98m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.449215 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hv98m\" (UniqueName: \"kubernetes.io/projected/68d63a4d-c7d0-4978-915e-b538b2ed82b1-kube-api-access-hv98m\") on node \"crc\" DevicePath \"\""
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.449282 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.449295 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvjjt\" (UniqueName: \"kubernetes.io/projected/c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb-kube-api-access-kvjjt\") on node \"crc\" DevicePath \"\""
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.449307 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68d63a4d-c7d0-4978-915e-b538b2ed82b1-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.502983 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-hckh5"
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.503080 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-x5rz8"
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.513464 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-hckh5" event={"ID":"68d63a4d-c7d0-4978-915e-b538b2ed82b1","Type":"ContainerDied","Data":"586b2633cc5df84fca428a5f4cfe89f4fb4aa8f757bc8134e66f2dfebcc15e8f"}
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.513514 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="586b2633cc5df84fca428a5f4cfe89f4fb4aa8f757bc8134e66f2dfebcc15e8f"
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.513527 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0b2521f9-40c6-4e13-a510-68d5dc34b313","Type":"ContainerStarted","Data":"e29f2ea92b5fad8601a78d97e6c2f63dd249a3bcf1a571d41df59feb3e4a228b"}
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.513544 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-x5rz8" event={"ID":"c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb","Type":"ContainerDied","Data":"6b20e80bc808c784408f95cce4c4d9825761f176bc6dc67f84e755c35cee655e"}
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.513557 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b20e80bc808c784408f95cce4c4d9825761f176bc6dc67f84e755c35cee655e"
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.628744 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-xcjh9"
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.758683 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9860ec84-49a2-4eb0-8706-3ae5c1673add-operator-scripts\") pod \"9860ec84-49a2-4eb0-8706-3ae5c1673add\" (UID: \"9860ec84-49a2-4eb0-8706-3ae5c1673add\") "
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.759219 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkgmg\" (UniqueName: \"kubernetes.io/projected/9860ec84-49a2-4eb0-8706-3ae5c1673add-kube-api-access-rkgmg\") pod \"9860ec84-49a2-4eb0-8706-3ae5c1673add\" (UID: \"9860ec84-49a2-4eb0-8706-3ae5c1673add\") "
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.762027 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9860ec84-49a2-4eb0-8706-3ae5c1673add-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9860ec84-49a2-4eb0-8706-3ae5c1673add" (UID: "9860ec84-49a2-4eb0-8706-3ae5c1673add"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.778357 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9860ec84-49a2-4eb0-8706-3ae5c1673add-kube-api-access-rkgmg" (OuterVolumeSpecName: "kube-api-access-rkgmg") pod "9860ec84-49a2-4eb0-8706-3ae5c1673add" (UID: "9860ec84-49a2-4eb0-8706-3ae5c1673add"). InnerVolumeSpecName "kube-api-access-rkgmg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.861728 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkgmg\" (UniqueName: \"kubernetes.io/projected/9860ec84-49a2-4eb0-8706-3ae5c1673add-kube-api-access-rkgmg\") on node \"crc\" DevicePath \"\""
Dec 04 15:26:09 crc kubenswrapper[4946]: I1204 15:26:09.861772 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9860ec84-49a2-4eb0-8706-3ae5c1673add-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.135724 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-345d-account-create-update-zlp2s"
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.285288 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2ec6f3e-22c6-496a-b23b-e0d493032eb6-operator-scripts\") pod \"a2ec6f3e-22c6-496a-b23b-e0d493032eb6\" (UID: \"a2ec6f3e-22c6-496a-b23b-e0d493032eb6\") "
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.286710 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldfpr\" (UniqueName: \"kubernetes.io/projected/a2ec6f3e-22c6-496a-b23b-e0d493032eb6-kube-api-access-ldfpr\") pod \"a2ec6f3e-22c6-496a-b23b-e0d493032eb6\" (UID: \"a2ec6f3e-22c6-496a-b23b-e0d493032eb6\") "
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.285967 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2ec6f3e-22c6-496a-b23b-e0d493032eb6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a2ec6f3e-22c6-496a-b23b-e0d493032eb6" (UID: "a2ec6f3e-22c6-496a-b23b-e0d493032eb6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.287491 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2ec6f3e-22c6-496a-b23b-e0d493032eb6-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.293340 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2ec6f3e-22c6-496a-b23b-e0d493032eb6-kube-api-access-ldfpr" (OuterVolumeSpecName: "kube-api-access-ldfpr") pod "a2ec6f3e-22c6-496a-b23b-e0d493032eb6" (UID: "a2ec6f3e-22c6-496a-b23b-e0d493032eb6"). InnerVolumeSpecName "kube-api-access-ldfpr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.370156 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-2d75-account-create-update-dr4ts"
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.381517 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c9a7-account-create-update-ppsl7"
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.391244 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldfpr\" (UniqueName: \"kubernetes.io/projected/a2ec6f3e-22c6-496a-b23b-e0d493032eb6-kube-api-access-ldfpr\") on node \"crc\" DevicePath \"\""
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.492820 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxslm\" (UniqueName: \"kubernetes.io/projected/86953713-6e7a-4cf7-8817-45785be4930f-kube-api-access-rxslm\") pod \"86953713-6e7a-4cf7-8817-45785be4930f\" (UID: \"86953713-6e7a-4cf7-8817-45785be4930f\") "
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.492902 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6-operator-scripts\") pod \"4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6\" (UID: \"4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6\") "
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.493063 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtq68\" (UniqueName: \"kubernetes.io/projected/4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6-kube-api-access-wtq68\") pod \"4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6\" (UID: \"4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6\") "
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.493123 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86953713-6e7a-4cf7-8817-45785be4930f-operator-scripts\") pod \"86953713-6e7a-4cf7-8817-45785be4930f\" (UID: \"86953713-6e7a-4cf7-8817-45785be4930f\") "
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.493593 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6" (UID: "4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.494034 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86953713-6e7a-4cf7-8817-45785be4930f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "86953713-6e7a-4cf7-8817-45785be4930f" (UID: "86953713-6e7a-4cf7-8817-45785be4930f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.495251 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.496967 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86953713-6e7a-4cf7-8817-45785be4930f-kube-api-access-rxslm" (OuterVolumeSpecName: "kube-api-access-rxslm") pod "86953713-6e7a-4cf7-8817-45785be4930f" (UID: "86953713-6e7a-4cf7-8817-45785be4930f"). InnerVolumeSpecName "kube-api-access-rxslm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.498784 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6-kube-api-access-wtq68" (OuterVolumeSpecName: "kube-api-access-wtq68") pod "4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6" (UID: "4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6"). InnerVolumeSpecName "kube-api-access-wtq68". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.501729 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2d75-account-create-update-dr4ts" event={"ID":"86953713-6e7a-4cf7-8817-45785be4930f","Type":"ContainerDied","Data":"1ba01ba44323fa29ca778cf947ea517c9a8d44632691f8f470075e3bb40cf68f"}
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.501782 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ba01ba44323fa29ca778cf947ea517c9a8d44632691f8f470075e3bb40cf68f"
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.501895 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-2d75-account-create-update-dr4ts"
Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.511355 4946 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-api-c9a7-account-create-update-ppsl7" Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.511356 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c9a7-account-create-update-ppsl7" event={"ID":"4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6","Type":"ContainerDied","Data":"f079acb6622139e6fac34f56ff087c67cb9bf727fcc70117132f36a90f47ee54"} Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.511406 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f079acb6622139e6fac34f56ff087c67cb9bf727fcc70117132f36a90f47ee54" Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.513905 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-345d-account-create-update-zlp2s" event={"ID":"a2ec6f3e-22c6-496a-b23b-e0d493032eb6","Type":"ContainerDied","Data":"9edbe525f3a04982d6f8ad054541f7eccdf8b98e4ff3549c586136128705bfcf"} Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.513947 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9edbe525f3a04982d6f8ad054541f7eccdf8b98e4ff3549c586136128705bfcf" Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.514012 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-345d-account-create-update-zlp2s" Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.521810 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-xcjh9" event={"ID":"9860ec84-49a2-4eb0-8706-3ae5c1673add","Type":"ContainerDied","Data":"0ae26c23740ebd78875502d605b120abca8b0d355dd9a4453af4efd0d3457f81"} Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.521859 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ae26c23740ebd78875502d605b120abca8b0d355dd9a4453af4efd0d3457f81" Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.521933 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-xcjh9" Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.597723 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtq68\" (UniqueName: \"kubernetes.io/projected/4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6-kube-api-access-wtq68\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.598023 4946 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86953713-6e7a-4cf7-8817-45785be4930f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:10 crc kubenswrapper[4946]: I1204 15:26:10.598083 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxslm\" (UniqueName: \"kubernetes.io/projected/86953713-6e7a-4cf7-8817-45785be4930f-kube-api-access-rxslm\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:11 crc kubenswrapper[4946]: I1204 15:26:11.565371 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0b2521f9-40c6-4e13-a510-68d5dc34b313","Type":"ContainerStarted","Data":"497480ae3b1d33f7b2a7ce2b69e9c94257f73a60d9c8773f56946d3f35e2faa7"} Dec 04 15:26:11 crc kubenswrapper[4946]: I1204 15:26:11.570314 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"57cbb428-8955-4aa2-9025-cfdd74592074","Type":"ContainerStarted","Data":"1ce1aa2215e05b3c0e196ade4f907f14cf19bf0c0839a7286c7959188ec61dbd"} Dec 04 15:26:11 crc kubenswrapper[4946]: I1204 15:26:11.606474 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.606426187 podStartE2EDuration="7.606426187s" podCreationTimestamp="2025-12-04 15:26:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:26:11.605076711 +0000 UTC m=+1422.491120352" watchObservedRunningTime="2025-12-04 15:26:11.606426187 +0000 UTC m=+1422.492469828" Dec 04 15:26:11 crc kubenswrapper[4946]: I1204 15:26:11.625821 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 04 15:26:12 crc kubenswrapper[4946]: I1204 15:26:12.595411 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0b2521f9-40c6-4e13-a510-68d5dc34b313","Type":"ContainerStarted","Data":"d9f03dfc928786491327d0f9c342dee8574249afa735c9687fb84359df0abab1"} Dec 04 15:26:12 crc kubenswrapper[4946]: I1204 15:26:12.641976 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.641941611 podStartE2EDuration="5.641941611s" podCreationTimestamp="2025-12-04 15:26:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:26:12.6214908 +0000 UTC m=+1423.507534441" watchObservedRunningTime="2025-12-04 15:26:12.641941611 +0000 UTC m=+1423.527985252" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.336031 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-tmjn4"] Dec 04 15:26:13 crc kubenswrapper[4946]: E1204 15:26:13.337036 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68d63a4d-c7d0-4978-915e-b538b2ed82b1" containerName="mariadb-database-create" Dec 04 15:26:13 
crc kubenswrapper[4946]: I1204 15:26:13.337058 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="68d63a4d-c7d0-4978-915e-b538b2ed82b1" containerName="mariadb-database-create" Dec 04 15:26:13 crc kubenswrapper[4946]: E1204 15:26:13.337075 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86953713-6e7a-4cf7-8817-45785be4930f" containerName="mariadb-account-create-update" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.337082 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="86953713-6e7a-4cf7-8817-45785be4930f" containerName="mariadb-account-create-update" Dec 04 15:26:13 crc kubenswrapper[4946]: E1204 15:26:13.337121 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9860ec84-49a2-4eb0-8706-3ae5c1673add" containerName="mariadb-database-create" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.337128 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="9860ec84-49a2-4eb0-8706-3ae5c1673add" containerName="mariadb-database-create" Dec 04 15:26:13 crc kubenswrapper[4946]: E1204 15:26:13.337157 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2ec6f3e-22c6-496a-b23b-e0d493032eb6" containerName="mariadb-account-create-update" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.337164 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2ec6f3e-22c6-496a-b23b-e0d493032eb6" containerName="mariadb-account-create-update" Dec 04 15:26:13 crc kubenswrapper[4946]: E1204 15:26:13.337177 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6" containerName="mariadb-account-create-update" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.337186 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6" containerName="mariadb-account-create-update" Dec 04 15:26:13 crc kubenswrapper[4946]: E1204 15:26:13.337204 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb" containerName="mariadb-database-create" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.337213 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb" containerName="mariadb-database-create" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.337443 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="9860ec84-49a2-4eb0-8706-3ae5c1673add" containerName="mariadb-database-create" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.337459 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2ec6f3e-22c6-496a-b23b-e0d493032eb6" containerName="mariadb-account-create-update" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.337468 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb" containerName="mariadb-database-create" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.337481 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="68d63a4d-c7d0-4978-915e-b538b2ed82b1" containerName="mariadb-database-create" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.337494 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="86953713-6e7a-4cf7-8817-45785be4930f" containerName="mariadb-account-create-update" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.337512 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6" 
containerName="mariadb-account-create-update" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.338480 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-tmjn4" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.341041 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.341258 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-rnhps" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.345236 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.348265 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-tmjn4"] Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.425152 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b221907-3033-4f08-b4b4-78fca89f7876-scripts\") pod \"nova-cell0-conductor-db-sync-tmjn4\" (UID: \"5b221907-3033-4f08-b4b4-78fca89f7876\") " pod="openstack/nova-cell0-conductor-db-sync-tmjn4" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.425245 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxddl\" (UniqueName: \"kubernetes.io/projected/5b221907-3033-4f08-b4b4-78fca89f7876-kube-api-access-dxddl\") pod \"nova-cell0-conductor-db-sync-tmjn4\" (UID: \"5b221907-3033-4f08-b4b4-78fca89f7876\") " pod="openstack/nova-cell0-conductor-db-sync-tmjn4" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.425669 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b221907-3033-4f08-b4b4-78fca89f7876-config-data\") pod \"nova-cell0-conductor-db-sync-tmjn4\" (UID: \"5b221907-3033-4f08-b4b4-78fca89f7876\") " pod="openstack/nova-cell0-conductor-db-sync-tmjn4" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.425803 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b221907-3033-4f08-b4b4-78fca89f7876-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-tmjn4\" (UID: \"5b221907-3033-4f08-b4b4-78fca89f7876\") " pod="openstack/nova-cell0-conductor-db-sync-tmjn4" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.527844 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b221907-3033-4f08-b4b4-78fca89f7876-config-data\") pod \"nova-cell0-conductor-db-sync-tmjn4\" (UID: \"5b221907-3033-4f08-b4b4-78fca89f7876\") " pod="openstack/nova-cell0-conductor-db-sync-tmjn4" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.527931 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b221907-3033-4f08-b4b4-78fca89f7876-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-tmjn4\" (UID: \"5b221907-3033-4f08-b4b4-78fca89f7876\") " pod="openstack/nova-cell0-conductor-db-sync-tmjn4" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.528022 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/5b221907-3033-4f08-b4b4-78fca89f7876-scripts\") pod \"nova-cell0-conductor-db-sync-tmjn4\" (UID: \"5b221907-3033-4f08-b4b4-78fca89f7876\") " pod="openstack/nova-cell0-conductor-db-sync-tmjn4" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.528074 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxddl\" (UniqueName: \"kubernetes.io/projected/5b221907-3033-4f08-b4b4-78fca89f7876-kube-api-access-dxddl\") pod \"nova-cell0-conductor-db-sync-tmjn4\" (UID: \"5b221907-3033-4f08-b4b4-78fca89f7876\") " pod="openstack/nova-cell0-conductor-db-sync-tmjn4" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.538092 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b221907-3033-4f08-b4b4-78fca89f7876-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-tmjn4\" (UID: \"5b221907-3033-4f08-b4b4-78fca89f7876\") " pod="openstack/nova-cell0-conductor-db-sync-tmjn4" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.547592 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b221907-3033-4f08-b4b4-78fca89f7876-scripts\") pod \"nova-cell0-conductor-db-sync-tmjn4\" (UID: \"5b221907-3033-4f08-b4b4-78fca89f7876\") " pod="openstack/nova-cell0-conductor-db-sync-tmjn4" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.550468 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxddl\" (UniqueName: \"kubernetes.io/projected/5b221907-3033-4f08-b4b4-78fca89f7876-kube-api-access-dxddl\") pod \"nova-cell0-conductor-db-sync-tmjn4\" (UID: \"5b221907-3033-4f08-b4b4-78fca89f7876\") " pod="openstack/nova-cell0-conductor-db-sync-tmjn4" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.570813 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b221907-3033-4f08-b4b4-78fca89f7876-config-data\") pod \"nova-cell0-conductor-db-sync-tmjn4\" (UID: \"5b221907-3033-4f08-b4b4-78fca89f7876\") " pod="openstack/nova-cell0-conductor-db-sync-tmjn4" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.618743 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"b5c0e428-98ad-4bda-aba1-685f1b5c8009","Type":"ContainerStarted","Data":"9fa30f7d0520cd0418dcfda2726f777f1cb75741e57e11f9dc860a7c7c4a4e9d"} Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.649541 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.38386538 podStartE2EDuration="37.649515792s" podCreationTimestamp="2025-12-04 15:25:36 +0000 UTC" firstStartedPulling="2025-12-04 15:25:37.874355878 +0000 UTC m=+1388.760399519" lastFinishedPulling="2025-12-04 15:26:12.14000629 +0000 UTC m=+1423.026049931" observedRunningTime="2025-12-04 15:26:13.643798808 +0000 UTC m=+1424.529842449" watchObservedRunningTime="2025-12-04 15:26:13.649515792 +0000 UTC m=+1424.535559433" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.659257 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-tmjn4" Dec 04 15:26:13 crc kubenswrapper[4946]: I1204 15:26:13.675275 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-api-0" Dec 04 15:26:14 crc kubenswrapper[4946]: I1204 15:26:14.365041 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-tmjn4"] Dec 04 15:26:14 crc kubenswrapper[4946]: W1204 15:26:14.366595 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5b221907_3033_4f08_b4b4_78fca89f7876.slice/crio-de34e185b003391df5e00ef6dbff059c03fc5db81772c47e2a6ae559e7ae49e6 WatchSource:0}: Error finding container de34e185b003391df5e00ef6dbff059c03fc5db81772c47e2a6ae559e7ae49e6: Status 404 returned error can't find the container with id de34e185b003391df5e00ef6dbff059c03fc5db81772c47e2a6ae559e7ae49e6 Dec 04 15:26:14 crc kubenswrapper[4946]: I1204 15:26:14.654383 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-tmjn4" event={"ID":"5b221907-3033-4f08-b4b4-78fca89f7876","Type":"ContainerStarted","Data":"de34e185b003391df5e00ef6dbff059c03fc5db81772c47e2a6ae559e7ae49e6"} Dec 04 15:26:14 crc kubenswrapper[4946]: I1204 15:26:14.785391 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 04 15:26:14 crc kubenswrapper[4946]: I1204 15:26:14.785895 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 04 15:26:14 crc kubenswrapper[4946]: I1204 15:26:14.894420 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 04 15:26:14 crc kubenswrapper[4946]: I1204 15:26:14.911472 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 04 15:26:15 crc kubenswrapper[4946]: I1204 15:26:15.709569 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 04 15:26:15 crc kubenswrapper[4946]: I1204 15:26:15.709640 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 04 15:26:17 crc kubenswrapper[4946]: I1204 15:26:17.731329 4946 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 15:26:17 crc kubenswrapper[4946]: I1204 15:26:17.732180 4946 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 15:26:17 crc kubenswrapper[4946]: I1204 15:26:17.889962 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 04 15:26:17 crc kubenswrapper[4946]: I1204 15:26:17.890076 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 04 15:26:17 crc kubenswrapper[4946]: I1204 15:26:17.943633 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 04 15:26:17 crc kubenswrapper[4946]: I1204 15:26:17.962362 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 04 15:26:17 crc kubenswrapper[4946]: I1204 15:26:17.999325 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" 
podUID="0203bca7-1453-4a7b-8597-5286d1d245b2" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.193:8776/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 04 15:26:18 crc kubenswrapper[4946]: I1204 15:26:18.703825 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-api-0" podUID="0203bca7-1453-4a7b-8597-5286d1d245b2" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.193:8776/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 04 15:26:18 crc kubenswrapper[4946]: I1204 15:26:18.745027 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 04 15:26:18 crc kubenswrapper[4946]: I1204 15:26:18.745108 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 04 15:26:22 crc kubenswrapper[4946]: I1204 15:26:22.478594 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:26:22 crc kubenswrapper[4946]: I1204 15:26:22.479378 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:26:22 crc kubenswrapper[4946]: I1204 15:26:22.479446 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:26:22 crc kubenswrapper[4946]: I1204 15:26:22.480395 4946 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a4672e78e5cc3d2ad9bab9f7368c2628b00d850ecbb6c4792dcaf037af3ed10c"} pod="openshift-machine-config-operator/machine-config-daemon-qhv79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 15:26:22 crc kubenswrapper[4946]: I1204 15:26:22.480450 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" containerID="cri-o://a4672e78e5cc3d2ad9bab9f7368c2628b00d850ecbb6c4792dcaf037af3ed10c" gracePeriod=600 Dec 04 15:26:22 crc kubenswrapper[4946]: I1204 15:26:22.817156 4946 generic.go:334] "Generic (PLEG): container finished" podID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerID="a4672e78e5cc3d2ad9bab9f7368c2628b00d850ecbb6c4792dcaf037af3ed10c" exitCode=0 Dec 04 15:26:22 crc kubenswrapper[4946]: I1204 15:26:22.817243 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerDied","Data":"a4672e78e5cc3d2ad9bab9f7368c2628b00d850ecbb6c4792dcaf037af3ed10c"} Dec 04 15:26:22 crc kubenswrapper[4946]: I1204 15:26:22.817597 4946 scope.go:117] "RemoveContainer" containerID="8a7e012c140a228f19f6cb14e9f9072b524033b54270efdccbdb4e5e1c52ef9d" Dec 04 15:26:23 crc kubenswrapper[4946]: 
I1204 15:26:23.586508 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 04 15:26:23 crc kubenswrapper[4946]: I1204 15:26:23.586675 4946 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 15:26:23 crc kubenswrapper[4946]: I1204 15:26:23.587401 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 04 15:26:23 crc kubenswrapper[4946]: I1204 15:26:23.594644 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 04 15:26:23 crc kubenswrapper[4946]: I1204 15:26:23.594801 4946 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 15:26:23 crc kubenswrapper[4946]: I1204 15:26:23.598942 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 04 15:26:28 crc kubenswrapper[4946]: I1204 15:26:28.924313 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f"} Dec 04 15:26:28 crc kubenswrapper[4946]: I1204 15:26:28.927591 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"511a9cd8-e1dc-4f14-b4df-847fda791a30","Type":"ContainerStarted","Data":"db5a36d6e4d0f309b80b724c643f9981ab59c0159a78f32249dae75ed7471a81"} Dec 04 15:26:28 crc kubenswrapper[4946]: I1204 15:26:28.927788 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerName="ceilometer-central-agent" containerID="cri-o://82361094770d1133fa93fa5940746142a1853727f06da55ed9ca80cc9594a136" gracePeriod=30 Dec 04 15:26:28 crc kubenswrapper[4946]: I1204 15:26:28.927839 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerName="ceilometer-notification-agent" containerID="cri-o://ae2dcbec5f6c9d27cf104b3aca7dd1babf779aa9ed8e43c512eba34160c84f7c" gracePeriod=30 Dec 04 15:26:28 crc kubenswrapper[4946]: I1204 15:26:28.927879 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 04 15:26:28 crc kubenswrapper[4946]: I1204 15:26:28.927825 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerName="proxy-httpd" containerID="cri-o://db5a36d6e4d0f309b80b724c643f9981ab59c0159a78f32249dae75ed7471a81" gracePeriod=30 Dec 04 15:26:28 crc kubenswrapper[4946]: I1204 15:26:28.927801 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerName="sg-core" containerID="cri-o://a5bb6013d244c1acc4273f94ed331aff51570fa19cd7406ad2f1730877eb6b53" gracePeriod=30 Dec 04 15:26:28 crc kubenswrapper[4946]: I1204 15:26:28.935921 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-tmjn4" event={"ID":"5b221907-3033-4f08-b4b4-78fca89f7876","Type":"ContainerStarted","Data":"0dceceb24243ed91dc3044c19eacddb35e8980a5052656405a7ca76c46158eda"} Dec 04 15:26:28 crc kubenswrapper[4946]: I1204 
15:26:28.972424 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-tmjn4" podStartSLOduration=1.9847757000000001 podStartE2EDuration="15.972397087s" podCreationTimestamp="2025-12-04 15:26:13 +0000 UTC" firstStartedPulling="2025-12-04 15:26:14.369520627 +0000 UTC m=+1425.255564258" lastFinishedPulling="2025-12-04 15:26:28.357142004 +0000 UTC m=+1439.243185645" observedRunningTime="2025-12-04 15:26:28.968383358 +0000 UTC m=+1439.854426999" watchObservedRunningTime="2025-12-04 15:26:28.972397087 +0000 UTC m=+1439.858440728" Dec 04 15:26:28 crc kubenswrapper[4946]: I1204 15:26:28.993297 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.164013212 podStartE2EDuration="31.993271549s" podCreationTimestamp="2025-12-04 15:25:57 +0000 UTC" firstStartedPulling="2025-12-04 15:25:58.523760896 +0000 UTC m=+1409.409804557" lastFinishedPulling="2025-12-04 15:26:28.353019253 +0000 UTC m=+1439.239062894" observedRunningTime="2025-12-04 15:26:28.988208933 +0000 UTC m=+1439.874252564" watchObservedRunningTime="2025-12-04 15:26:28.993271549 +0000 UTC m=+1439.879315190" Dec 04 15:26:29 crc kubenswrapper[4946]: I1204 15:26:29.949530 4946 generic.go:334] "Generic (PLEG): container finished" podID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerID="db5a36d6e4d0f309b80b724c643f9981ab59c0159a78f32249dae75ed7471a81" exitCode=0 Dec 04 15:26:29 crc kubenswrapper[4946]: I1204 15:26:29.950242 4946 generic.go:334] "Generic (PLEG): container finished" podID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerID="a5bb6013d244c1acc4273f94ed331aff51570fa19cd7406ad2f1730877eb6b53" exitCode=2 Dec 04 15:26:29 crc kubenswrapper[4946]: I1204 15:26:29.950259 4946 generic.go:334] "Generic (PLEG): container finished" podID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerID="82361094770d1133fa93fa5940746142a1853727f06da55ed9ca80cc9594a136" exitCode=0 Dec 04 15:26:29 crc kubenswrapper[4946]: I1204 15:26:29.951422 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"511a9cd8-e1dc-4f14-b4df-847fda791a30","Type":"ContainerDied","Data":"db5a36d6e4d0f309b80b724c643f9981ab59c0159a78f32249dae75ed7471a81"} Dec 04 15:26:29 crc kubenswrapper[4946]: I1204 15:26:29.951452 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"511a9cd8-e1dc-4f14-b4df-847fda791a30","Type":"ContainerDied","Data":"a5bb6013d244c1acc4273f94ed331aff51570fa19cd7406ad2f1730877eb6b53"} Dec 04 15:26:29 crc kubenswrapper[4946]: I1204 15:26:29.951464 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"511a9cd8-e1dc-4f14-b4df-847fda791a30","Type":"ContainerDied","Data":"82361094770d1133fa93fa5940746142a1853727f06da55ed9ca80cc9594a136"} Dec 04 15:26:30 crc kubenswrapper[4946]: I1204 15:26:30.996171 4946 generic.go:334] "Generic (PLEG): container finished" podID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerID="ae2dcbec5f6c9d27cf104b3aca7dd1babf779aa9ed8e43c512eba34160c84f7c" exitCode=0 Dec 04 15:26:30 crc kubenswrapper[4946]: I1204 15:26:30.996546 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"511a9cd8-e1dc-4f14-b4df-847fda791a30","Type":"ContainerDied","Data":"ae2dcbec5f6c9d27cf104b3aca7dd1babf779aa9ed8e43c512eba34160c84f7c"} Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.135364 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.216931 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-config-data\") pod \"511a9cd8-e1dc-4f14-b4df-847fda791a30\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.217079 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/511a9cd8-e1dc-4f14-b4df-847fda791a30-run-httpd\") pod \"511a9cd8-e1dc-4f14-b4df-847fda791a30\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.217267 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-combined-ca-bundle\") pod \"511a9cd8-e1dc-4f14-b4df-847fda791a30\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.217385 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-scripts\") pod \"511a9cd8-e1dc-4f14-b4df-847fda791a30\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.217477 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-sg-core-conf-yaml\") pod \"511a9cd8-e1dc-4f14-b4df-847fda791a30\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.217582 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vt95\" (UniqueName: \"kubernetes.io/projected/511a9cd8-e1dc-4f14-b4df-847fda791a30-kube-api-access-7vt95\") pod \"511a9cd8-e1dc-4f14-b4df-847fda791a30\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.217657 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/511a9cd8-e1dc-4f14-b4df-847fda791a30-log-httpd\") pod \"511a9cd8-e1dc-4f14-b4df-847fda791a30\" (UID: \"511a9cd8-e1dc-4f14-b4df-847fda791a30\") " Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.219038 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/511a9cd8-e1dc-4f14-b4df-847fda791a30-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "511a9cd8-e1dc-4f14-b4df-847fda791a30" (UID: "511a9cd8-e1dc-4f14-b4df-847fda791a30"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.219574 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/511a9cd8-e1dc-4f14-b4df-847fda791a30-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "511a9cd8-e1dc-4f14-b4df-847fda791a30" (UID: "511a9cd8-e1dc-4f14-b4df-847fda791a30"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.226075 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-scripts" (OuterVolumeSpecName: "scripts") pod "511a9cd8-e1dc-4f14-b4df-847fda791a30" (UID: "511a9cd8-e1dc-4f14-b4df-847fda791a30"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.230265 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/511a9cd8-e1dc-4f14-b4df-847fda791a30-kube-api-access-7vt95" (OuterVolumeSpecName: "kube-api-access-7vt95") pod "511a9cd8-e1dc-4f14-b4df-847fda791a30" (UID: "511a9cd8-e1dc-4f14-b4df-847fda791a30"). InnerVolumeSpecName "kube-api-access-7vt95". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.269793 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "511a9cd8-e1dc-4f14-b4df-847fda791a30" (UID: "511a9cd8-e1dc-4f14-b4df-847fda791a30"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.320323 4946 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/511a9cd8-e1dc-4f14-b4df-847fda791a30-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.320367 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.320359 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "511a9cd8-e1dc-4f14-b4df-847fda791a30" (UID: "511a9cd8-e1dc-4f14-b4df-847fda791a30"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.320377 4946 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.320478 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vt95\" (UniqueName: \"kubernetes.io/projected/511a9cd8-e1dc-4f14-b4df-847fda791a30-kube-api-access-7vt95\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.320491 4946 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/511a9cd8-e1dc-4f14-b4df-847fda791a30-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.353175 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-config-data" (OuterVolumeSpecName: "config-data") pod "511a9cd8-e1dc-4f14-b4df-847fda791a30" (UID: "511a9cd8-e1dc-4f14-b4df-847fda791a30"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.422073 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:31 crc kubenswrapper[4946]: I1204 15:26:31.422133 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/511a9cd8-e1dc-4f14-b4df-847fda791a30-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.016585 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"511a9cd8-e1dc-4f14-b4df-847fda791a30","Type":"ContainerDied","Data":"4c822496184cab6148427f6b131c312c64110151ab02869a05728914b12bfd3b"} Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.016668 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.017095 4946 scope.go:117] "RemoveContainer" containerID="db5a36d6e4d0f309b80b724c643f9981ab59c0159a78f32249dae75ed7471a81" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.065810 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.078620 4946 scope.go:117] "RemoveContainer" containerID="a5bb6013d244c1acc4273f94ed331aff51570fa19cd7406ad2f1730877eb6b53" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.086822 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.108013 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:26:32 crc kubenswrapper[4946]: E1204 15:26:32.108962 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerName="sg-core" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.109054 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerName="sg-core" Dec 04 15:26:32 crc kubenswrapper[4946]: E1204 15:26:32.109194 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerName="ceilometer-central-agent" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.109309 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerName="ceilometer-central-agent" Dec 04 15:26:32 crc kubenswrapper[4946]: E1204 15:26:32.109415 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerName="ceilometer-notification-agent" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.109490 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerName="ceilometer-notification-agent" Dec 04 15:26:32 crc kubenswrapper[4946]: E1204 15:26:32.109576 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerName="proxy-httpd" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.109652 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerName="proxy-httpd" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.109974 4946 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerName="sg-core" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.110073 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerName="proxy-httpd" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.110221 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerName="ceilometer-central-agent" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.110312 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="511a9cd8-e1dc-4f14-b4df-847fda791a30" containerName="ceilometer-notification-agent" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.112999 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.116809 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.117020 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.117707 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.132248 4946 scope.go:117] "RemoveContainer" containerID="ae2dcbec5f6c9d27cf104b3aca7dd1babf779aa9ed8e43c512eba34160c84f7c" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.247453 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/befce96d-dfb0-4dd4-96da-7614ff74b402-log-httpd\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.247528 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.247591 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-scripts\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.247611 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.247633 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfdmn\" (UniqueName: \"kubernetes.io/projected/befce96d-dfb0-4dd4-96da-7614ff74b402-kube-api-access-vfdmn\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.247649 4946 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/befce96d-dfb0-4dd4-96da-7614ff74b402-run-httpd\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.247669 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-config-data\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.349097 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-scripts\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.349180 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.349213 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfdmn\" (UniqueName: \"kubernetes.io/projected/befce96d-dfb0-4dd4-96da-7614ff74b402-kube-api-access-vfdmn\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.349236 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/befce96d-dfb0-4dd4-96da-7614ff74b402-run-httpd\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.349717 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-config-data\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.349977 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/befce96d-dfb0-4dd4-96da-7614ff74b402-log-httpd\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.350048 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.372190 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/befce96d-dfb0-4dd4-96da-7614ff74b402-run-httpd\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.372781 4946 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/befce96d-dfb0-4dd4-96da-7614ff74b402-log-httpd\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.377374 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.378405 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-config-data\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.380993 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.381576 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-scripts\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.394912 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfdmn\" (UniqueName: \"kubernetes.io/projected/befce96d-dfb0-4dd4-96da-7614ff74b402-kube-api-access-vfdmn\") pod \"ceilometer-0\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " pod="openstack/ceilometer-0" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.415674 4946 scope.go:117] "RemoveContainer" containerID="82361094770d1133fa93fa5940746142a1853727f06da55ed9ca80cc9594a136" Dec 04 15:26:32 crc kubenswrapper[4946]: I1204 15:26:32.668838 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:26:33 crc kubenswrapper[4946]: I1204 15:26:33.198945 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:26:33 crc kubenswrapper[4946]: I1204 15:26:33.469273 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="511a9cd8-e1dc-4f14-b4df-847fda791a30" path="/var/lib/kubelet/pods/511a9cd8-e1dc-4f14-b4df-847fda791a30/volumes" Dec 04 15:26:34 crc kubenswrapper[4946]: I1204 15:26:34.044842 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"befce96d-dfb0-4dd4-96da-7614ff74b402","Type":"ContainerStarted","Data":"03162e6069fca792e4ed8519795ac2d4917e18d5648f94ea6bd1f9caa5e64f4c"} Dec 04 15:26:34 crc kubenswrapper[4946]: I1204 15:26:34.358784 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:26:36 crc kubenswrapper[4946]: I1204 15:26:36.074075 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"befce96d-dfb0-4dd4-96da-7614ff74b402","Type":"ContainerStarted","Data":"fbaeee1113268466f4494008714eeb8ab928ed8603995ee6e767133e6948fcbd"} Dec 04 15:26:37 crc kubenswrapper[4946]: I1204 15:26:37.090805 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"befce96d-dfb0-4dd4-96da-7614ff74b402","Type":"ContainerStarted","Data":"7d423b6e69c667a2a9a2cebdb56bb21934b4440dc9915b3eae520b5aefeaf03d"} Dec 04 15:26:37 crc kubenswrapper[4946]: I1204 15:26:37.091297 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"befce96d-dfb0-4dd4-96da-7614ff74b402","Type":"ContainerStarted","Data":"00585bcad23da92f596052a79b4e96e233a5a5a683d8d96381cfb02a5c755c1d"} Dec 04 15:26:39 crc kubenswrapper[4946]: I1204 15:26:39.115343 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"befce96d-dfb0-4dd4-96da-7614ff74b402","Type":"ContainerStarted","Data":"c25b4a2a7586f0df66d7c3c26a5181c6c50dbac626bb7d7d41fb7cb4de384e0b"} Dec 04 15:26:39 crc kubenswrapper[4946]: I1204 15:26:39.116056 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 04 15:26:39 crc kubenswrapper[4946]: I1204 15:26:39.115669 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerName="ceilometer-notification-agent" containerID="cri-o://00585bcad23da92f596052a79b4e96e233a5a5a683d8d96381cfb02a5c755c1d" gracePeriod=30 Dec 04 15:26:39 crc kubenswrapper[4946]: I1204 15:26:39.115578 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerName="ceilometer-central-agent" containerID="cri-o://fbaeee1113268466f4494008714eeb8ab928ed8603995ee6e767133e6948fcbd" gracePeriod=30 Dec 04 15:26:39 crc kubenswrapper[4946]: I1204 15:26:39.115729 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerName="proxy-httpd" containerID="cri-o://c25b4a2a7586f0df66d7c3c26a5181c6c50dbac626bb7d7d41fb7cb4de384e0b" gracePeriod=30 Dec 04 15:26:39 crc kubenswrapper[4946]: I1204 15:26:39.115695 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerName="sg-core" 
containerID="cri-o://7d423b6e69c667a2a9a2cebdb56bb21934b4440dc9915b3eae520b5aefeaf03d" gracePeriod=30 Dec 04 15:26:40 crc kubenswrapper[4946]: I1204 15:26:40.135810 4946 generic.go:334] "Generic (PLEG): container finished" podID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerID="c25b4a2a7586f0df66d7c3c26a5181c6c50dbac626bb7d7d41fb7cb4de384e0b" exitCode=0 Dec 04 15:26:40 crc kubenswrapper[4946]: I1204 15:26:40.135846 4946 generic.go:334] "Generic (PLEG): container finished" podID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerID="7d423b6e69c667a2a9a2cebdb56bb21934b4440dc9915b3eae520b5aefeaf03d" exitCode=2 Dec 04 15:26:40 crc kubenswrapper[4946]: I1204 15:26:40.135855 4946 generic.go:334] "Generic (PLEG): container finished" podID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerID="00585bcad23da92f596052a79b4e96e233a5a5a683d8d96381cfb02a5c755c1d" exitCode=0 Dec 04 15:26:40 crc kubenswrapper[4946]: I1204 15:26:40.135899 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"befce96d-dfb0-4dd4-96da-7614ff74b402","Type":"ContainerDied","Data":"c25b4a2a7586f0df66d7c3c26a5181c6c50dbac626bb7d7d41fb7cb4de384e0b"} Dec 04 15:26:40 crc kubenswrapper[4946]: I1204 15:26:40.135930 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"befce96d-dfb0-4dd4-96da-7614ff74b402","Type":"ContainerDied","Data":"7d423b6e69c667a2a9a2cebdb56bb21934b4440dc9915b3eae520b5aefeaf03d"} Dec 04 15:26:40 crc kubenswrapper[4946]: I1204 15:26:40.135940 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"befce96d-dfb0-4dd4-96da-7614ff74b402","Type":"ContainerDied","Data":"00585bcad23da92f596052a79b4e96e233a5a5a683d8d96381cfb02a5c755c1d"} Dec 04 15:26:41 crc kubenswrapper[4946]: I1204 15:26:41.337677 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.719116529 podStartE2EDuration="9.337649699s" podCreationTimestamp="2025-12-04 15:26:32 +0000 UTC" firstStartedPulling="2025-12-04 15:26:33.20815768 +0000 UTC m=+1444.094201321" lastFinishedPulling="2025-12-04 15:26:37.82669085 +0000 UTC m=+1448.712734491" observedRunningTime="2025-12-04 15:26:39.14568628 +0000 UTC m=+1450.031729941" watchObservedRunningTime="2025-12-04 15:26:41.337649699 +0000 UTC m=+1452.223693340" Dec 04 15:26:41 crc kubenswrapper[4946]: I1204 15:26:41.338132 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-plqg4"] Dec 04 15:26:41 crc kubenswrapper[4946]: I1204 15:26:41.340732 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-plqg4" Dec 04 15:26:41 crc kubenswrapper[4946]: I1204 15:26:41.368110 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-plqg4"] Dec 04 15:26:41 crc kubenswrapper[4946]: I1204 15:26:41.455187 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1d4637f-17ff-456c-9764-dd305c6e3917-utilities\") pod \"redhat-operators-plqg4\" (UID: \"e1d4637f-17ff-456c-9764-dd305c6e3917\") " pod="openshift-marketplace/redhat-operators-plqg4" Dec 04 15:26:41 crc kubenswrapper[4946]: I1204 15:26:41.455329 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttbn5\" (UniqueName: \"kubernetes.io/projected/e1d4637f-17ff-456c-9764-dd305c6e3917-kube-api-access-ttbn5\") pod \"redhat-operators-plqg4\" (UID: \"e1d4637f-17ff-456c-9764-dd305c6e3917\") " pod="openshift-marketplace/redhat-operators-plqg4" Dec 04 15:26:41 crc kubenswrapper[4946]: I1204 15:26:41.455384 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1d4637f-17ff-456c-9764-dd305c6e3917-catalog-content\") pod \"redhat-operators-plqg4\" (UID: \"e1d4637f-17ff-456c-9764-dd305c6e3917\") " pod="openshift-marketplace/redhat-operators-plqg4" Dec 04 15:26:41 crc kubenswrapper[4946]: I1204 15:26:41.557831 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1d4637f-17ff-456c-9764-dd305c6e3917-utilities\") pod \"redhat-operators-plqg4\" (UID: \"e1d4637f-17ff-456c-9764-dd305c6e3917\") " pod="openshift-marketplace/redhat-operators-plqg4" Dec 04 15:26:41 crc kubenswrapper[4946]: I1204 15:26:41.557930 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttbn5\" (UniqueName: \"kubernetes.io/projected/e1d4637f-17ff-456c-9764-dd305c6e3917-kube-api-access-ttbn5\") pod \"redhat-operators-plqg4\" (UID: \"e1d4637f-17ff-456c-9764-dd305c6e3917\") " pod="openshift-marketplace/redhat-operators-plqg4" Dec 04 15:26:41 crc kubenswrapper[4946]: I1204 15:26:41.557963 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1d4637f-17ff-456c-9764-dd305c6e3917-catalog-content\") pod \"redhat-operators-plqg4\" (UID: \"e1d4637f-17ff-456c-9764-dd305c6e3917\") " pod="openshift-marketplace/redhat-operators-plqg4" Dec 04 15:26:41 crc kubenswrapper[4946]: I1204 15:26:41.559396 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1d4637f-17ff-456c-9764-dd305c6e3917-catalog-content\") pod \"redhat-operators-plqg4\" (UID: \"e1d4637f-17ff-456c-9764-dd305c6e3917\") " pod="openshift-marketplace/redhat-operators-plqg4" Dec 04 15:26:41 crc kubenswrapper[4946]: I1204 15:26:41.560013 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1d4637f-17ff-456c-9764-dd305c6e3917-utilities\") pod \"redhat-operators-plqg4\" (UID: \"e1d4637f-17ff-456c-9764-dd305c6e3917\") " pod="openshift-marketplace/redhat-operators-plqg4" Dec 04 15:26:41 crc kubenswrapper[4946]: I1204 15:26:41.587829 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ttbn5\" (UniqueName: \"kubernetes.io/projected/e1d4637f-17ff-456c-9764-dd305c6e3917-kube-api-access-ttbn5\") pod \"redhat-operators-plqg4\" (UID: \"e1d4637f-17ff-456c-9764-dd305c6e3917\") " pod="openshift-marketplace/redhat-operators-plqg4" Dec 04 15:26:41 crc kubenswrapper[4946]: I1204 15:26:41.664667 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-plqg4" Dec 04 15:26:42 crc kubenswrapper[4946]: I1204 15:26:42.204870 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-plqg4"] Dec 04 15:26:42 crc kubenswrapper[4946]: W1204 15:26:42.209470 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1d4637f_17ff_456c_9764_dd305c6e3917.slice/crio-1369b78926fcb17cbfc3f4b6586df55abd5c6794e8d42cc804c0e6898751f233 WatchSource:0}: Error finding container 1369b78926fcb17cbfc3f4b6586df55abd5c6794e8d42cc804c0e6898751f233: Status 404 returned error can't find the container with id 1369b78926fcb17cbfc3f4b6586df55abd5c6794e8d42cc804c0e6898751f233 Dec 04 15:26:43 crc kubenswrapper[4946]: I1204 15:26:43.169185 4946 generic.go:334] "Generic (PLEG): container finished" podID="e1d4637f-17ff-456c-9764-dd305c6e3917" containerID="b639cc5eadcc71a9941089bbca478789ca9c2e9958b640434f64f96571b0470b" exitCode=0 Dec 04 15:26:43 crc kubenswrapper[4946]: I1204 15:26:43.169294 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-plqg4" event={"ID":"e1d4637f-17ff-456c-9764-dd305c6e3917","Type":"ContainerDied","Data":"b639cc5eadcc71a9941089bbca478789ca9c2e9958b640434f64f96571b0470b"} Dec 04 15:26:43 crc kubenswrapper[4946]: I1204 15:26:43.169587 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-plqg4" event={"ID":"e1d4637f-17ff-456c-9764-dd305c6e3917","Type":"ContainerStarted","Data":"1369b78926fcb17cbfc3f4b6586df55abd5c6794e8d42cc804c0e6898751f233"} Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.196320 4946 generic.go:334] "Generic (PLEG): container finished" podID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerID="fbaeee1113268466f4494008714eeb8ab928ed8603995ee6e767133e6948fcbd" exitCode=0 Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.196435 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"befce96d-dfb0-4dd4-96da-7614ff74b402","Type":"ContainerDied","Data":"fbaeee1113268466f4494008714eeb8ab928ed8603995ee6e767133e6948fcbd"} Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.200444 4946 generic.go:334] "Generic (PLEG): container finished" podID="e1d4637f-17ff-456c-9764-dd305c6e3917" containerID="1230aca8e177488b97696cdc7756cbb89d937bccc36045b94960d075815c3205" exitCode=0 Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.200478 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-plqg4" event={"ID":"e1d4637f-17ff-456c-9764-dd305c6e3917","Type":"ContainerDied","Data":"1230aca8e177488b97696cdc7756cbb89d937bccc36045b94960d075815c3205"} Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.335058 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.466416 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-sg-core-conf-yaml\") pod \"befce96d-dfb0-4dd4-96da-7614ff74b402\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.466573 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-scripts\") pod \"befce96d-dfb0-4dd4-96da-7614ff74b402\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.466605 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/befce96d-dfb0-4dd4-96da-7614ff74b402-run-httpd\") pod \"befce96d-dfb0-4dd4-96da-7614ff74b402\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.466674 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-config-data\") pod \"befce96d-dfb0-4dd4-96da-7614ff74b402\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.466742 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-combined-ca-bundle\") pod \"befce96d-dfb0-4dd4-96da-7614ff74b402\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.466838 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfdmn\" (UniqueName: \"kubernetes.io/projected/befce96d-dfb0-4dd4-96da-7614ff74b402-kube-api-access-vfdmn\") pod \"befce96d-dfb0-4dd4-96da-7614ff74b402\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.467007 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/befce96d-dfb0-4dd4-96da-7614ff74b402-log-httpd\") pod \"befce96d-dfb0-4dd4-96da-7614ff74b402\" (UID: \"befce96d-dfb0-4dd4-96da-7614ff74b402\") " Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.469655 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/befce96d-dfb0-4dd4-96da-7614ff74b402-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "befce96d-dfb0-4dd4-96da-7614ff74b402" (UID: "befce96d-dfb0-4dd4-96da-7614ff74b402"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.469879 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/befce96d-dfb0-4dd4-96da-7614ff74b402-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "befce96d-dfb0-4dd4-96da-7614ff74b402" (UID: "befce96d-dfb0-4dd4-96da-7614ff74b402"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.481004 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/befce96d-dfb0-4dd4-96da-7614ff74b402-kube-api-access-vfdmn" (OuterVolumeSpecName: "kube-api-access-vfdmn") pod "befce96d-dfb0-4dd4-96da-7614ff74b402" (UID: "befce96d-dfb0-4dd4-96da-7614ff74b402"). InnerVolumeSpecName "kube-api-access-vfdmn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.496392 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-scripts" (OuterVolumeSpecName: "scripts") pod "befce96d-dfb0-4dd4-96da-7614ff74b402" (UID: "befce96d-dfb0-4dd4-96da-7614ff74b402"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.514703 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "befce96d-dfb0-4dd4-96da-7614ff74b402" (UID: "befce96d-dfb0-4dd4-96da-7614ff74b402"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.570643 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfdmn\" (UniqueName: \"kubernetes.io/projected/befce96d-dfb0-4dd4-96da-7614ff74b402-kube-api-access-vfdmn\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.570687 4946 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/befce96d-dfb0-4dd4-96da-7614ff74b402-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.570699 4946 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.570710 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.570719 4946 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/befce96d-dfb0-4dd4-96da-7614ff74b402-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.577751 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "befce96d-dfb0-4dd4-96da-7614ff74b402" (UID: "befce96d-dfb0-4dd4-96da-7614ff74b402"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.602760 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-config-data" (OuterVolumeSpecName: "config-data") pod "befce96d-dfb0-4dd4-96da-7614ff74b402" (UID: "befce96d-dfb0-4dd4-96da-7614ff74b402"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.672096 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:45 crc kubenswrapper[4946]: I1204 15:26:45.672154 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/befce96d-dfb0-4dd4-96da-7614ff74b402-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.212451 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"befce96d-dfb0-4dd4-96da-7614ff74b402","Type":"ContainerDied","Data":"03162e6069fca792e4ed8519795ac2d4917e18d5648f94ea6bd1f9caa5e64f4c"} Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.212518 4946 scope.go:117] "RemoveContainer" containerID="c25b4a2a7586f0df66d7c3c26a5181c6c50dbac626bb7d7d41fb7cb4de384e0b" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.212706 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.326218 4946 scope.go:117] "RemoveContainer" containerID="7d423b6e69c667a2a9a2cebdb56bb21934b4440dc9915b3eae520b5aefeaf03d" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.332167 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.356232 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.362532 4946 scope.go:117] "RemoveContainer" containerID="00585bcad23da92f596052a79b4e96e233a5a5a683d8d96381cfb02a5c755c1d" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.386324 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:26:46 crc kubenswrapper[4946]: E1204 15:26:46.387032 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerName="ceilometer-notification-agent" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.387054 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerName="ceilometer-notification-agent" Dec 04 15:26:46 crc kubenswrapper[4946]: E1204 15:26:46.387089 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerName="proxy-httpd" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.387100 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerName="proxy-httpd" Dec 04 15:26:46 crc kubenswrapper[4946]: E1204 15:26:46.387116 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerName="sg-core" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.387144 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerName="sg-core" Dec 04 15:26:46 crc kubenswrapper[4946]: E1204 15:26:46.387172 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerName="ceilometer-central-agent" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.387184 4946 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerName="ceilometer-central-agent" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.387442 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerName="ceilometer-central-agent" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.387461 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerName="sg-core" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.387487 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerName="ceilometer-notification-agent" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.387497 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="befce96d-dfb0-4dd4-96da-7614ff74b402" containerName="proxy-httpd" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.389986 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.393453 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.394580 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.403051 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.418707 4946 scope.go:117] "RemoveContainer" containerID="fbaeee1113268466f4494008714eeb8ab928ed8603995ee6e767133e6948fcbd" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.491991 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-config-data\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.492085 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e74405d1-7bee-4ce0-ae1e-982383f8a280-run-httpd\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.492217 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.492343 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e74405d1-7bee-4ce0-ae1e-982383f8a280-log-httpd\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.492442 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b852j\" (UniqueName: \"kubernetes.io/projected/e74405d1-7bee-4ce0-ae1e-982383f8a280-kube-api-access-b852j\") pod \"ceilometer-0\" (UID: 
\"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.492486 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.492532 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-scripts\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.594981 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e74405d1-7bee-4ce0-ae1e-982383f8a280-log-httpd\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.595519 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b852j\" (UniqueName: \"kubernetes.io/projected/e74405d1-7bee-4ce0-ae1e-982383f8a280-kube-api-access-b852j\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.595692 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.595838 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-scripts\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.596050 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-config-data\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.596192 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e74405d1-7bee-4ce0-ae1e-982383f8a280-run-httpd\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.595689 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e74405d1-7bee-4ce0-ae1e-982383f8a280-log-httpd\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.596310 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.596849 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e74405d1-7bee-4ce0-ae1e-982383f8a280-run-httpd\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.604334 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-scripts\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.605719 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-config-data\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.606194 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.606768 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.619836 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b852j\" (UniqueName: \"kubernetes.io/projected/e74405d1-7bee-4ce0-ae1e-982383f8a280-kube-api-access-b852j\") pod \"ceilometer-0\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") " pod="openstack/ceilometer-0" Dec 04 15:26:46 crc kubenswrapper[4946]: I1204 15:26:46.727188 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:26:47 crc kubenswrapper[4946]: I1204 15:26:47.238432 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:26:47 crc kubenswrapper[4946]: W1204 15:26:47.249927 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode74405d1_7bee_4ce0_ae1e_982383f8a280.slice/crio-fbbb90b7b96d93bb4a1aa1588339056515fc3f24f756dbc850739992d1f48565 WatchSource:0}: Error finding container fbbb90b7b96d93bb4a1aa1588339056515fc3f24f756dbc850739992d1f48565: Status 404 returned error can't find the container with id fbbb90b7b96d93bb4a1aa1588339056515fc3f24f756dbc850739992d1f48565 Dec 04 15:26:47 crc kubenswrapper[4946]: I1204 15:26:47.465728 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="befce96d-dfb0-4dd4-96da-7614ff74b402" path="/var/lib/kubelet/pods/befce96d-dfb0-4dd4-96da-7614ff74b402/volumes" Dec 04 15:26:48 crc kubenswrapper[4946]: I1204 15:26:48.247922 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e74405d1-7bee-4ce0-ae1e-982383f8a280","Type":"ContainerStarted","Data":"fbbb90b7b96d93bb4a1aa1588339056515fc3f24f756dbc850739992d1f48565"} Dec 04 15:26:53 crc kubenswrapper[4946]: I1204 15:26:53.314215 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-plqg4" event={"ID":"e1d4637f-17ff-456c-9764-dd305c6e3917","Type":"ContainerStarted","Data":"d168994c47748309f8b562b57b2f3b75ec74d803184f5f374fad2fdcea780a02"} Dec 04 15:26:53 crc kubenswrapper[4946]: I1204 15:26:53.316733 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e74405d1-7bee-4ce0-ae1e-982383f8a280","Type":"ContainerStarted","Data":"9d6fc0c64723a1e8d85f6558cf43178e790d21391b0bc85a16e7398e57ed5741"} Dec 04 15:26:54 crc kubenswrapper[4946]: I1204 15:26:54.332439 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e74405d1-7bee-4ce0-ae1e-982383f8a280","Type":"ContainerStarted","Data":"e73cbe0147d5f743ec36980b93291caf09e89cdeb1b6e093dd0480f6f7b35471"} Dec 04 15:26:54 crc kubenswrapper[4946]: I1204 15:26:54.333143 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e74405d1-7bee-4ce0-ae1e-982383f8a280","Type":"ContainerStarted","Data":"28bf40102534f81985901135f1835b1d4b18b51714666805916c00cf279864e9"} Dec 04 15:26:56 crc kubenswrapper[4946]: I1204 15:26:56.357963 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e74405d1-7bee-4ce0-ae1e-982383f8a280","Type":"ContainerStarted","Data":"c849cf84934ab20e373b7b78f930f9e8d68db61550e5d971a5034c013ee226db"} Dec 04 15:26:56 crc kubenswrapper[4946]: I1204 15:26:56.358653 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 04 15:26:56 crc kubenswrapper[4946]: I1204 15:26:56.415046 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-plqg4" podStartSLOduration=6.896663171 podStartE2EDuration="15.41502625s" podCreationTimestamp="2025-12-04 15:26:41 +0000 UTC" firstStartedPulling="2025-12-04 15:26:43.172867243 +0000 UTC m=+1454.058910884" lastFinishedPulling="2025-12-04 15:26:51.691230322 +0000 UTC m=+1462.577273963" observedRunningTime="2025-12-04 15:26:53.336757393 +0000 UTC m=+1464.222801044" watchObservedRunningTime="2025-12-04 
15:26:56.41502625 +0000 UTC m=+1467.301069891" Dec 04 15:26:56 crc kubenswrapper[4946]: I1204 15:26:56.417245 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.9471026 podStartE2EDuration="10.417234899s" podCreationTimestamp="2025-12-04 15:26:46 +0000 UTC" firstStartedPulling="2025-12-04 15:26:47.25737232 +0000 UTC m=+1458.143415961" lastFinishedPulling="2025-12-04 15:26:55.727504619 +0000 UTC m=+1466.613548260" observedRunningTime="2025-12-04 15:26:56.412038129 +0000 UTC m=+1467.298081770" watchObservedRunningTime="2025-12-04 15:26:56.417234899 +0000 UTC m=+1467.303278540" Dec 04 15:26:58 crc kubenswrapper[4946]: I1204 15:26:58.379885 4946 generic.go:334] "Generic (PLEG): container finished" podID="5b221907-3033-4f08-b4b4-78fca89f7876" containerID="0dceceb24243ed91dc3044c19eacddb35e8980a5052656405a7ca76c46158eda" exitCode=0 Dec 04 15:26:58 crc kubenswrapper[4946]: I1204 15:26:58.381735 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-tmjn4" event={"ID":"5b221907-3033-4f08-b4b4-78fca89f7876","Type":"ContainerDied","Data":"0dceceb24243ed91dc3044c19eacddb35e8980a5052656405a7ca76c46158eda"} Dec 04 15:26:59 crc kubenswrapper[4946]: I1204 15:26:59.828895 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-tmjn4" Dec 04 15:26:59 crc kubenswrapper[4946]: I1204 15:26:59.952197 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxddl\" (UniqueName: \"kubernetes.io/projected/5b221907-3033-4f08-b4b4-78fca89f7876-kube-api-access-dxddl\") pod \"5b221907-3033-4f08-b4b4-78fca89f7876\" (UID: \"5b221907-3033-4f08-b4b4-78fca89f7876\") " Dec 04 15:26:59 crc kubenswrapper[4946]: I1204 15:26:59.952275 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b221907-3033-4f08-b4b4-78fca89f7876-combined-ca-bundle\") pod \"5b221907-3033-4f08-b4b4-78fca89f7876\" (UID: \"5b221907-3033-4f08-b4b4-78fca89f7876\") " Dec 04 15:26:59 crc kubenswrapper[4946]: I1204 15:26:59.952396 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b221907-3033-4f08-b4b4-78fca89f7876-config-data\") pod \"5b221907-3033-4f08-b4b4-78fca89f7876\" (UID: \"5b221907-3033-4f08-b4b4-78fca89f7876\") " Dec 04 15:26:59 crc kubenswrapper[4946]: I1204 15:26:59.952571 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b221907-3033-4f08-b4b4-78fca89f7876-scripts\") pod \"5b221907-3033-4f08-b4b4-78fca89f7876\" (UID: \"5b221907-3033-4f08-b4b4-78fca89f7876\") " Dec 04 15:26:59 crc kubenswrapper[4946]: I1204 15:26:59.964321 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b221907-3033-4f08-b4b4-78fca89f7876-scripts" (OuterVolumeSpecName: "scripts") pod "5b221907-3033-4f08-b4b4-78fca89f7876" (UID: "5b221907-3033-4f08-b4b4-78fca89f7876"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:26:59 crc kubenswrapper[4946]: I1204 15:26:59.965253 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b221907-3033-4f08-b4b4-78fca89f7876-kube-api-access-dxddl" (OuterVolumeSpecName: "kube-api-access-dxddl") pod "5b221907-3033-4f08-b4b4-78fca89f7876" (UID: "5b221907-3033-4f08-b4b4-78fca89f7876"). InnerVolumeSpecName "kube-api-access-dxddl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:26:59 crc kubenswrapper[4946]: I1204 15:26:59.989733 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b221907-3033-4f08-b4b4-78fca89f7876-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5b221907-3033-4f08-b4b4-78fca89f7876" (UID: "5b221907-3033-4f08-b4b4-78fca89f7876"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:26:59 crc kubenswrapper[4946]: I1204 15:26:59.990349 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b221907-3033-4f08-b4b4-78fca89f7876-config-data" (OuterVolumeSpecName: "config-data") pod "5b221907-3033-4f08-b4b4-78fca89f7876" (UID: "5b221907-3033-4f08-b4b4-78fca89f7876"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.055917 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b221907-3033-4f08-b4b4-78fca89f7876-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.055973 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxddl\" (UniqueName: \"kubernetes.io/projected/5b221907-3033-4f08-b4b4-78fca89f7876-kube-api-access-dxddl\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.055993 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b221907-3033-4f08-b4b4-78fca89f7876-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.056005 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b221907-3033-4f08-b4b4-78fca89f7876-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.410876 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-tmjn4" event={"ID":"5b221907-3033-4f08-b4b4-78fca89f7876","Type":"ContainerDied","Data":"de34e185b003391df5e00ef6dbff059c03fc5db81772c47e2a6ae559e7ae49e6"} Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.410938 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de34e185b003391df5e00ef6dbff059c03fc5db81772c47e2a6ae559e7ae49e6" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.411040 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-tmjn4" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.546775 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 04 15:27:00 crc kubenswrapper[4946]: E1204 15:27:00.547545 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b221907-3033-4f08-b4b4-78fca89f7876" containerName="nova-cell0-conductor-db-sync" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.547573 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b221907-3033-4f08-b4b4-78fca89f7876" containerName="nova-cell0-conductor-db-sync" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.547858 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b221907-3033-4f08-b4b4-78fca89f7876" containerName="nova-cell0-conductor-db-sync" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.549025 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.554522 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.554946 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-rnhps" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.562096 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.680835 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/665f0a75-0872-48ba-8c1c-fffbc874a197-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"665f0a75-0872-48ba-8c1c-fffbc874a197\") " pod="openstack/nova-cell0-conductor-0" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.681353 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtxz6\" (UniqueName: \"kubernetes.io/projected/665f0a75-0872-48ba-8c1c-fffbc874a197-kube-api-access-mtxz6\") pod \"nova-cell0-conductor-0\" (UID: \"665f0a75-0872-48ba-8c1c-fffbc874a197\") " pod="openstack/nova-cell0-conductor-0" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.681539 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/665f0a75-0872-48ba-8c1c-fffbc874a197-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"665f0a75-0872-48ba-8c1c-fffbc874a197\") " pod="openstack/nova-cell0-conductor-0" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.785079 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/665f0a75-0872-48ba-8c1c-fffbc874a197-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"665f0a75-0872-48ba-8c1c-fffbc874a197\") " pod="openstack/nova-cell0-conductor-0" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.785223 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/665f0a75-0872-48ba-8c1c-fffbc874a197-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"665f0a75-0872-48ba-8c1c-fffbc874a197\") " pod="openstack/nova-cell0-conductor-0" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 
15:27:00.785353 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtxz6\" (UniqueName: \"kubernetes.io/projected/665f0a75-0872-48ba-8c1c-fffbc874a197-kube-api-access-mtxz6\") pod \"nova-cell0-conductor-0\" (UID: \"665f0a75-0872-48ba-8c1c-fffbc874a197\") " pod="openstack/nova-cell0-conductor-0" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.793012 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/665f0a75-0872-48ba-8c1c-fffbc874a197-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"665f0a75-0872-48ba-8c1c-fffbc874a197\") " pod="openstack/nova-cell0-conductor-0" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.798418 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/665f0a75-0872-48ba-8c1c-fffbc874a197-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"665f0a75-0872-48ba-8c1c-fffbc874a197\") " pod="openstack/nova-cell0-conductor-0" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.810297 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtxz6\" (UniqueName: \"kubernetes.io/projected/665f0a75-0872-48ba-8c1c-fffbc874a197-kube-api-access-mtxz6\") pod \"nova-cell0-conductor-0\" (UID: \"665f0a75-0872-48ba-8c1c-fffbc874a197\") " pod="openstack/nova-cell0-conductor-0" Dec 04 15:27:00 crc kubenswrapper[4946]: I1204 15:27:00.877375 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 04 15:27:01 crc kubenswrapper[4946]: I1204 15:27:01.410429 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 04 15:27:01 crc kubenswrapper[4946]: I1204 15:27:01.664830 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-plqg4" Dec 04 15:27:01 crc kubenswrapper[4946]: I1204 15:27:01.665012 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-plqg4" Dec 04 15:27:01 crc kubenswrapper[4946]: I1204 15:27:01.751793 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-plqg4" Dec 04 15:27:02 crc kubenswrapper[4946]: I1204 15:27:02.441087 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"665f0a75-0872-48ba-8c1c-fffbc874a197","Type":"ContainerStarted","Data":"95450bd3458ff6b1ed1b15910439967d81ba76793d01b3681bf778bf69e868b1"} Dec 04 15:27:02 crc kubenswrapper[4946]: I1204 15:27:02.441191 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"665f0a75-0872-48ba-8c1c-fffbc874a197","Type":"ContainerStarted","Data":"05c25d5dbcaebc7574728c119165be33d9b9a6a05a07f9f1bcfbbc49539ce173"} Dec 04 15:27:02 crc kubenswrapper[4946]: I1204 15:27:02.441474 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 04 15:27:02 crc kubenswrapper[4946]: I1204 15:27:02.484805 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.484770483 podStartE2EDuration="2.484770483s" podCreationTimestamp="2025-12-04 15:27:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-04 15:27:02.468562836 +0000 UTC m=+1473.354606477" watchObservedRunningTime="2025-12-04 15:27:02.484770483 +0000 UTC m=+1473.370814124" Dec 04 15:27:02 crc kubenswrapper[4946]: I1204 15:27:02.523961 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-plqg4" Dec 04 15:27:02 crc kubenswrapper[4946]: I1204 15:27:02.592514 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-plqg4"] Dec 04 15:27:04 crc kubenswrapper[4946]: I1204 15:27:04.482469 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-plqg4" podUID="e1d4637f-17ff-456c-9764-dd305c6e3917" containerName="registry-server" containerID="cri-o://d168994c47748309f8b562b57b2f3b75ec74d803184f5f374fad2fdcea780a02" gracePeriod=2 Dec 04 15:27:05 crc kubenswrapper[4946]: I1204 15:27:05.066177 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 04 15:27:05 crc kubenswrapper[4946]: I1204 15:27:05.066972 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="665f0a75-0872-48ba-8c1c-fffbc874a197" containerName="nova-cell0-conductor-conductor" containerID="cri-o://95450bd3458ff6b1ed1b15910439967d81ba76793d01b3681bf778bf69e868b1" gracePeriod=30 Dec 04 15:27:05 crc kubenswrapper[4946]: I1204 15:27:05.497056 4946 generic.go:334] "Generic (PLEG): container finished" podID="e1d4637f-17ff-456c-9764-dd305c6e3917" containerID="d168994c47748309f8b562b57b2f3b75ec74d803184f5f374fad2fdcea780a02" exitCode=0 Dec 04 15:27:05 crc kubenswrapper[4946]: I1204 15:27:05.497154 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-plqg4" event={"ID":"e1d4637f-17ff-456c-9764-dd305c6e3917","Type":"ContainerDied","Data":"d168994c47748309f8b562b57b2f3b75ec74d803184f5f374fad2fdcea780a02"} Dec 04 15:27:05 crc kubenswrapper[4946]: I1204 15:27:05.497212 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-plqg4" event={"ID":"e1d4637f-17ff-456c-9764-dd305c6e3917","Type":"ContainerDied","Data":"1369b78926fcb17cbfc3f4b6586df55abd5c6794e8d42cc804c0e6898751f233"} Dec 04 15:27:05 crc kubenswrapper[4946]: I1204 15:27:05.497226 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1369b78926fcb17cbfc3f4b6586df55abd5c6794e8d42cc804c0e6898751f233" Dec 04 15:27:05 crc kubenswrapper[4946]: I1204 15:27:05.507854 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-plqg4" Dec 04 15:27:05 crc kubenswrapper[4946]: I1204 15:27:05.623653 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttbn5\" (UniqueName: \"kubernetes.io/projected/e1d4637f-17ff-456c-9764-dd305c6e3917-kube-api-access-ttbn5\") pod \"e1d4637f-17ff-456c-9764-dd305c6e3917\" (UID: \"e1d4637f-17ff-456c-9764-dd305c6e3917\") " Dec 04 15:27:05 crc kubenswrapper[4946]: I1204 15:27:05.623759 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1d4637f-17ff-456c-9764-dd305c6e3917-utilities\") pod \"e1d4637f-17ff-456c-9764-dd305c6e3917\" (UID: \"e1d4637f-17ff-456c-9764-dd305c6e3917\") " Dec 04 15:27:05 crc kubenswrapper[4946]: I1204 15:27:05.623854 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1d4637f-17ff-456c-9764-dd305c6e3917-catalog-content\") pod \"e1d4637f-17ff-456c-9764-dd305c6e3917\" (UID: \"e1d4637f-17ff-456c-9764-dd305c6e3917\") " Dec 04 15:27:05 crc kubenswrapper[4946]: I1204 15:27:05.626522 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1d4637f-17ff-456c-9764-dd305c6e3917-utilities" (OuterVolumeSpecName: "utilities") pod "e1d4637f-17ff-456c-9764-dd305c6e3917" (UID: "e1d4637f-17ff-456c-9764-dd305c6e3917"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:27:05 crc kubenswrapper[4946]: I1204 15:27:05.632775 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1d4637f-17ff-456c-9764-dd305c6e3917-kube-api-access-ttbn5" (OuterVolumeSpecName: "kube-api-access-ttbn5") pod "e1d4637f-17ff-456c-9764-dd305c6e3917" (UID: "e1d4637f-17ff-456c-9764-dd305c6e3917"). InnerVolumeSpecName "kube-api-access-ttbn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:27:05 crc kubenswrapper[4946]: I1204 15:27:05.726149 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttbn5\" (UniqueName: \"kubernetes.io/projected/e1d4637f-17ff-456c-9764-dd305c6e3917-kube-api-access-ttbn5\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:05 crc kubenswrapper[4946]: I1204 15:27:05.726187 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1d4637f-17ff-456c-9764-dd305c6e3917-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:05 crc kubenswrapper[4946]: I1204 15:27:05.750788 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1d4637f-17ff-456c-9764-dd305c6e3917-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e1d4637f-17ff-456c-9764-dd305c6e3917" (UID: "e1d4637f-17ff-456c-9764-dd305c6e3917"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:27:05 crc kubenswrapper[4946]: I1204 15:27:05.828205 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1d4637f-17ff-456c-9764-dd305c6e3917-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.347568 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.438813 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/665f0a75-0872-48ba-8c1c-fffbc874a197-config-data\") pod \"665f0a75-0872-48ba-8c1c-fffbc874a197\" (UID: \"665f0a75-0872-48ba-8c1c-fffbc874a197\") "
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.439042 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/665f0a75-0872-48ba-8c1c-fffbc874a197-combined-ca-bundle\") pod \"665f0a75-0872-48ba-8c1c-fffbc874a197\" (UID: \"665f0a75-0872-48ba-8c1c-fffbc874a197\") "
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.439261 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtxz6\" (UniqueName: \"kubernetes.io/projected/665f0a75-0872-48ba-8c1c-fffbc874a197-kube-api-access-mtxz6\") pod \"665f0a75-0872-48ba-8c1c-fffbc874a197\" (UID: \"665f0a75-0872-48ba-8c1c-fffbc874a197\") "
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.446173 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/665f0a75-0872-48ba-8c1c-fffbc874a197-kube-api-access-mtxz6" (OuterVolumeSpecName: "kube-api-access-mtxz6") pod "665f0a75-0872-48ba-8c1c-fffbc874a197" (UID: "665f0a75-0872-48ba-8c1c-fffbc874a197"). InnerVolumeSpecName "kube-api-access-mtxz6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.471664 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/665f0a75-0872-48ba-8c1c-fffbc874a197-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "665f0a75-0872-48ba-8c1c-fffbc874a197" (UID: "665f0a75-0872-48ba-8c1c-fffbc874a197"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.480503 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/665f0a75-0872-48ba-8c1c-fffbc874a197-config-data" (OuterVolumeSpecName: "config-data") pod "665f0a75-0872-48ba-8c1c-fffbc874a197" (UID: "665f0a75-0872-48ba-8c1c-fffbc874a197"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.508836 4946 generic.go:334] "Generic (PLEG): container finished" podID="665f0a75-0872-48ba-8c1c-fffbc874a197" containerID="95450bd3458ff6b1ed1b15910439967d81ba76793d01b3681bf778bf69e868b1" exitCode=0
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.508935 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-plqg4"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.509180 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.509208 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"665f0a75-0872-48ba-8c1c-fffbc874a197","Type":"ContainerDied","Data":"95450bd3458ff6b1ed1b15910439967d81ba76793d01b3681bf778bf69e868b1"}
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.509268 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"665f0a75-0872-48ba-8c1c-fffbc874a197","Type":"ContainerDied","Data":"05c25d5dbcaebc7574728c119165be33d9b9a6a05a07f9f1bcfbbc49539ce173"}
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.509339 4946 scope.go:117] "RemoveContainer" containerID="95450bd3458ff6b1ed1b15910439967d81ba76793d01b3681bf778bf69e868b1"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.541780 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtxz6\" (UniqueName: \"kubernetes.io/projected/665f0a75-0872-48ba-8c1c-fffbc874a197-kube-api-access-mtxz6\") on node \"crc\" DevicePath \"\""
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.541830 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/665f0a75-0872-48ba-8c1c-fffbc874a197-config-data\") on node \"crc\" DevicePath \"\""
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.541844 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/665f0a75-0872-48ba-8c1c-fffbc874a197-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.548730 4946 scope.go:117] "RemoveContainer" containerID="95450bd3458ff6b1ed1b15910439967d81ba76793d01b3681bf778bf69e868b1"
Dec 04 15:27:06 crc kubenswrapper[4946]: E1204 15:27:06.552077 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95450bd3458ff6b1ed1b15910439967d81ba76793d01b3681bf778bf69e868b1\": container with ID starting with 95450bd3458ff6b1ed1b15910439967d81ba76793d01b3681bf778bf69e868b1 not found: ID does not exist" containerID="95450bd3458ff6b1ed1b15910439967d81ba76793d01b3681bf778bf69e868b1"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.552172 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95450bd3458ff6b1ed1b15910439967d81ba76793d01b3681bf778bf69e868b1"} err="failed to get container status \"95450bd3458ff6b1ed1b15910439967d81ba76793d01b3681bf778bf69e868b1\": rpc error: code = NotFound desc = could not find container \"95450bd3458ff6b1ed1b15910439967d81ba76793d01b3681bf778bf69e868b1\": container with ID starting with 95450bd3458ff6b1ed1b15910439967d81ba76793d01b3681bf778bf69e868b1 not found: ID does not exist"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.569775 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-plqg4"]
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.589952 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-plqg4"]
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.603862 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.618239 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.630658 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 04 15:27:06 crc kubenswrapper[4946]: E1204 15:27:06.631255 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="665f0a75-0872-48ba-8c1c-fffbc874a197" containerName="nova-cell0-conductor-conductor"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.631278 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="665f0a75-0872-48ba-8c1c-fffbc874a197" containerName="nova-cell0-conductor-conductor"
Dec 04 15:27:06 crc kubenswrapper[4946]: E1204 15:27:06.631306 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1d4637f-17ff-456c-9764-dd305c6e3917" containerName="extract-content"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.631314 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1d4637f-17ff-456c-9764-dd305c6e3917" containerName="extract-content"
Dec 04 15:27:06 crc kubenswrapper[4946]: E1204 15:27:06.631341 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1d4637f-17ff-456c-9764-dd305c6e3917" containerName="extract-utilities"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.631350 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1d4637f-17ff-456c-9764-dd305c6e3917" containerName="extract-utilities"
Dec 04 15:27:06 crc kubenswrapper[4946]: E1204 15:27:06.631376 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1d4637f-17ff-456c-9764-dd305c6e3917" containerName="registry-server"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.631382 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1d4637f-17ff-456c-9764-dd305c6e3917" containerName="registry-server"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.631586 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1d4637f-17ff-456c-9764-dd305c6e3917" containerName="registry-server"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.631616 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="665f0a75-0872-48ba-8c1c-fffbc874a197" containerName="nova-cell0-conductor-conductor"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.644981 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.645158 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.649333 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.649689 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-rnhps"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.745416 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9thnj\" (UniqueName: \"kubernetes.io/projected/75446cac-ffe3-4e3a-9bde-e8372b8318c3-kube-api-access-9thnj\") pod \"nova-cell0-conductor-0\" (UID: \"75446cac-ffe3-4e3a-9bde-e8372b8318c3\") " pod="openstack/nova-cell0-conductor-0"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.745806 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75446cac-ffe3-4e3a-9bde-e8372b8318c3-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"75446cac-ffe3-4e3a-9bde-e8372b8318c3\") " pod="openstack/nova-cell0-conductor-0"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.745996 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75446cac-ffe3-4e3a-9bde-e8372b8318c3-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"75446cac-ffe3-4e3a-9bde-e8372b8318c3\") " pod="openstack/nova-cell0-conductor-0"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.848681 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75446cac-ffe3-4e3a-9bde-e8372b8318c3-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"75446cac-ffe3-4e3a-9bde-e8372b8318c3\") " pod="openstack/nova-cell0-conductor-0"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.848935 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9thnj\" (UniqueName: \"kubernetes.io/projected/75446cac-ffe3-4e3a-9bde-e8372b8318c3-kube-api-access-9thnj\") pod \"nova-cell0-conductor-0\" (UID: \"75446cac-ffe3-4e3a-9bde-e8372b8318c3\") " pod="openstack/nova-cell0-conductor-0"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.849008 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75446cac-ffe3-4e3a-9bde-e8372b8318c3-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"75446cac-ffe3-4e3a-9bde-e8372b8318c3\") " pod="openstack/nova-cell0-conductor-0"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.857043 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75446cac-ffe3-4e3a-9bde-e8372b8318c3-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"75446cac-ffe3-4e3a-9bde-e8372b8318c3\") " pod="openstack/nova-cell0-conductor-0"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.857086 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75446cac-ffe3-4e3a-9bde-e8372b8318c3-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"75446cac-ffe3-4e3a-9bde-e8372b8318c3\") " pod="openstack/nova-cell0-conductor-0"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.874751 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9thnj\" (UniqueName: \"kubernetes.io/projected/75446cac-ffe3-4e3a-9bde-e8372b8318c3-kube-api-access-9thnj\") pod \"nova-cell0-conductor-0\" (UID: \"75446cac-ffe3-4e3a-9bde-e8372b8318c3\") " pod="openstack/nova-cell0-conductor-0"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.943522 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.943856 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerName="ceilometer-central-agent" containerID="cri-o://9d6fc0c64723a1e8d85f6558cf43178e790d21391b0bc85a16e7398e57ed5741" gracePeriod=30
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.943933 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerName="sg-core" containerID="cri-o://e73cbe0147d5f743ec36980b93291caf09e89cdeb1b6e093dd0480f6f7b35471" gracePeriod=30
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.943958 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerName="proxy-httpd" containerID="cri-o://c849cf84934ab20e373b7b78f930f9e8d68db61550e5d971a5034c013ee226db" gracePeriod=30
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.944004 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerName="ceilometer-notification-agent" containerID="cri-o://28bf40102534f81985901135f1835b1d4b18b51714666805916c00cf279864e9" gracePeriod=30
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.957177 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.205:3000/\": EOF"
Dec 04 15:27:06 crc kubenswrapper[4946]: I1204 15:27:06.998951 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 04 15:27:07 crc kubenswrapper[4946]: I1204 15:27:07.480304 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="665f0a75-0872-48ba-8c1c-fffbc874a197" path="/var/lib/kubelet/pods/665f0a75-0872-48ba-8c1c-fffbc874a197/volumes"
Dec 04 15:27:07 crc kubenswrapper[4946]: I1204 15:27:07.481795 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1d4637f-17ff-456c-9764-dd305c6e3917" path="/var/lib/kubelet/pods/e1d4637f-17ff-456c-9764-dd305c6e3917/volumes"
Dec 04 15:27:07 crc kubenswrapper[4946]: I1204 15:27:07.544173 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 04 15:27:07 crc kubenswrapper[4946]: I1204 15:27:07.564373 4946 generic.go:334] "Generic (PLEG): container finished" podID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerID="c849cf84934ab20e373b7b78f930f9e8d68db61550e5d971a5034c013ee226db" exitCode=0
Dec 04 15:27:07 crc kubenswrapper[4946]: I1204 15:27:07.564425 4946 generic.go:334] "Generic (PLEG): container finished" podID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerID="e73cbe0147d5f743ec36980b93291caf09e89cdeb1b6e093dd0480f6f7b35471" exitCode=2
Dec 04 15:27:07 crc kubenswrapper[4946]: I1204 15:27:07.564450 4946 generic.go:334] "Generic (PLEG): container finished" podID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerID="9d6fc0c64723a1e8d85f6558cf43178e790d21391b0bc85a16e7398e57ed5741" exitCode=0
Dec 04 15:27:07 crc kubenswrapper[4946]: I1204 15:27:07.564483 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e74405d1-7bee-4ce0-ae1e-982383f8a280","Type":"ContainerDied","Data":"c849cf84934ab20e373b7b78f930f9e8d68db61550e5d971a5034c013ee226db"}
Dec 04 15:27:07 crc kubenswrapper[4946]: I1204 15:27:07.564517 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e74405d1-7bee-4ce0-ae1e-982383f8a280","Type":"ContainerDied","Data":"e73cbe0147d5f743ec36980b93291caf09e89cdeb1b6e093dd0480f6f7b35471"}
Dec 04 15:27:07 crc kubenswrapper[4946]: I1204 15:27:07.564528 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e74405d1-7bee-4ce0-ae1e-982383f8a280","Type":"ContainerDied","Data":"9d6fc0c64723a1e8d85f6558cf43178e790d21391b0bc85a16e7398e57ed5741"}
Dec 04 15:27:08 crc kubenswrapper[4946]: I1204 15:27:08.581040 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"75446cac-ffe3-4e3a-9bde-e8372b8318c3","Type":"ContainerStarted","Data":"b20b6346e666c2e6dbbabf6cb4aa3f4b1168fa99755a9edff33bd75894c4ff7e"}
Dec 04 15:27:08 crc kubenswrapper[4946]: I1204 15:27:08.582730 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"75446cac-ffe3-4e3a-9bde-e8372b8318c3","Type":"ContainerStarted","Data":"45642370365cd828d20183a36ecebf8a77d0ae8e2ace89654b1021c107588efe"}
Dec 04 15:27:08 crc kubenswrapper[4946]: I1204 15:27:08.582880 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Dec 04 15:27:08 crc kubenswrapper[4946]: I1204 15:27:08.613200 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.613177779 podStartE2EDuration="2.613177779s" podCreationTimestamp="2025-12-04 15:27:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:27:08.602801779 +0000 UTC m=+1479.488845430" watchObservedRunningTime="2025-12-04 15:27:08.613177779 +0000 UTC m=+1479.499221420"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.082339 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.249637 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-config-data\") pod \"e74405d1-7bee-4ce0-ae1e-982383f8a280\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") "
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.249719 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b852j\" (UniqueName: \"kubernetes.io/projected/e74405d1-7bee-4ce0-ae1e-982383f8a280-kube-api-access-b852j\") pod \"e74405d1-7bee-4ce0-ae1e-982383f8a280\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") "
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.249818 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-combined-ca-bundle\") pod \"e74405d1-7bee-4ce0-ae1e-982383f8a280\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") "
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.249945 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-scripts\") pod \"e74405d1-7bee-4ce0-ae1e-982383f8a280\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") "
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.249984 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-sg-core-conf-yaml\") pod \"e74405d1-7bee-4ce0-ae1e-982383f8a280\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") "
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.250019 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e74405d1-7bee-4ce0-ae1e-982383f8a280-log-httpd\") pod \"e74405d1-7bee-4ce0-ae1e-982383f8a280\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") "
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.250152 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e74405d1-7bee-4ce0-ae1e-982383f8a280-run-httpd\") pod \"e74405d1-7bee-4ce0-ae1e-982383f8a280\" (UID: \"e74405d1-7bee-4ce0-ae1e-982383f8a280\") "
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.251090 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e74405d1-7bee-4ce0-ae1e-982383f8a280-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e74405d1-7bee-4ce0-ae1e-982383f8a280" (UID: "e74405d1-7bee-4ce0-ae1e-982383f8a280"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.251237 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e74405d1-7bee-4ce0-ae1e-982383f8a280-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e74405d1-7bee-4ce0-ae1e-982383f8a280" (UID: "e74405d1-7bee-4ce0-ae1e-982383f8a280"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.251684 4946 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e74405d1-7bee-4ce0-ae1e-982383f8a280-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.251711 4946 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e74405d1-7bee-4ce0-ae1e-982383f8a280-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.257870 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e74405d1-7bee-4ce0-ae1e-982383f8a280-kube-api-access-b852j" (OuterVolumeSpecName: "kube-api-access-b852j") pod "e74405d1-7bee-4ce0-ae1e-982383f8a280" (UID: "e74405d1-7bee-4ce0-ae1e-982383f8a280"). InnerVolumeSpecName "kube-api-access-b852j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.258762 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-scripts" (OuterVolumeSpecName: "scripts") pod "e74405d1-7bee-4ce0-ae1e-982383f8a280" (UID: "e74405d1-7bee-4ce0-ae1e-982383f8a280"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.295449 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e74405d1-7bee-4ce0-ae1e-982383f8a280" (UID: "e74405d1-7bee-4ce0-ae1e-982383f8a280"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.349856 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e74405d1-7bee-4ce0-ae1e-982383f8a280" (UID: "e74405d1-7bee-4ce0-ae1e-982383f8a280"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.353421 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b852j\" (UniqueName: \"kubernetes.io/projected/e74405d1-7bee-4ce0-ae1e-982383f8a280-kube-api-access-b852j\") on node \"crc\" DevicePath \"\""
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.353457 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.353467 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-scripts\") on node \"crc\" DevicePath \"\""
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.353476 4946 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.372260 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-config-data" (OuterVolumeSpecName: "config-data") pod "e74405d1-7bee-4ce0-ae1e-982383f8a280" (UID: "e74405d1-7bee-4ce0-ae1e-982383f8a280"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.454730 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e74405d1-7bee-4ce0-ae1e-982383f8a280-config-data\") on node \"crc\" DevicePath \"\""
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.620344 4946 generic.go:334] "Generic (PLEG): container finished" podID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerID="28bf40102534f81985901135f1835b1d4b18b51714666805916c00cf279864e9" exitCode=0
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.620399 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e74405d1-7bee-4ce0-ae1e-982383f8a280","Type":"ContainerDied","Data":"28bf40102534f81985901135f1835b1d4b18b51714666805916c00cf279864e9"}
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.620428 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e74405d1-7bee-4ce0-ae1e-982383f8a280","Type":"ContainerDied","Data":"fbbb90b7b96d93bb4a1aa1588339056515fc3f24f756dbc850739992d1f48565"}
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.620459 4946 scope.go:117] "RemoveContainer" containerID="c849cf84934ab20e373b7b78f930f9e8d68db61550e5d971a5034c013ee226db"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.620650 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.651273 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.655913 4946 scope.go:117] "RemoveContainer" containerID="e73cbe0147d5f743ec36980b93291caf09e89cdeb1b6e093dd0480f6f7b35471"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.661973 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.695482 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 04 15:27:11 crc kubenswrapper[4946]: E1204 15:27:11.696100 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerName="ceilometer-notification-agent"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.696145 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerName="ceilometer-notification-agent"
Dec 04 15:27:11 crc kubenswrapper[4946]: E1204 15:27:11.696172 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerName="sg-core"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.696180 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerName="sg-core"
Dec 04 15:27:11 crc kubenswrapper[4946]: E1204 15:27:11.696207 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerName="proxy-httpd"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.696215 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerName="proxy-httpd"
Dec 04 15:27:11 crc kubenswrapper[4946]: E1204 15:27:11.696235 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerName="ceilometer-central-agent"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.696242 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerName="ceilometer-central-agent"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.696480 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerName="proxy-httpd"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.696508 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerName="sg-core"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.696529 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerName="ceilometer-central-agent"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.696538 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="e74405d1-7bee-4ce0-ae1e-982383f8a280" containerName="ceilometer-notification-agent"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.698904 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.712694 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.712771 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.712784 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.761441 4946 scope.go:117] "RemoveContainer" containerID="28bf40102534f81985901135f1835b1d4b18b51714666805916c00cf279864e9"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.782907 4946 scope.go:117] "RemoveContainer" containerID="9d6fc0c64723a1e8d85f6558cf43178e790d21391b0bc85a16e7398e57ed5741"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.804823 4946 scope.go:117] "RemoveContainer" containerID="c849cf84934ab20e373b7b78f930f9e8d68db61550e5d971a5034c013ee226db"
Dec 04 15:27:11 crc kubenswrapper[4946]: E1204 15:27:11.805429 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c849cf84934ab20e373b7b78f930f9e8d68db61550e5d971a5034c013ee226db\": container with ID starting with c849cf84934ab20e373b7b78f930f9e8d68db61550e5d971a5034c013ee226db not found: ID does not exist" containerID="c849cf84934ab20e373b7b78f930f9e8d68db61550e5d971a5034c013ee226db"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.805484 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c849cf84934ab20e373b7b78f930f9e8d68db61550e5d971a5034c013ee226db"} err="failed to get container status \"c849cf84934ab20e373b7b78f930f9e8d68db61550e5d971a5034c013ee226db\": rpc error: code = NotFound desc = could not find container \"c849cf84934ab20e373b7b78f930f9e8d68db61550e5d971a5034c013ee226db\": container with ID starting with c849cf84934ab20e373b7b78f930f9e8d68db61550e5d971a5034c013ee226db not found: ID does not exist"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.805556 4946 scope.go:117] "RemoveContainer" containerID="e73cbe0147d5f743ec36980b93291caf09e89cdeb1b6e093dd0480f6f7b35471"
Dec 04 15:27:11 crc kubenswrapper[4946]: E1204 15:27:11.805963 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e73cbe0147d5f743ec36980b93291caf09e89cdeb1b6e093dd0480f6f7b35471\": container with ID starting with e73cbe0147d5f743ec36980b93291caf09e89cdeb1b6e093dd0480f6f7b35471 not found: ID does not exist" containerID="e73cbe0147d5f743ec36980b93291caf09e89cdeb1b6e093dd0480f6f7b35471"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.805999 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e73cbe0147d5f743ec36980b93291caf09e89cdeb1b6e093dd0480f6f7b35471"} err="failed to get container status \"e73cbe0147d5f743ec36980b93291caf09e89cdeb1b6e093dd0480f6f7b35471\": rpc error: code = NotFound desc = could not find container \"e73cbe0147d5f743ec36980b93291caf09e89cdeb1b6e093dd0480f6f7b35471\": container with ID starting with e73cbe0147d5f743ec36980b93291caf09e89cdeb1b6e093dd0480f6f7b35471 not found: ID does not exist"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.806026 4946 scope.go:117] "RemoveContainer" containerID="28bf40102534f81985901135f1835b1d4b18b51714666805916c00cf279864e9"
Dec 04 15:27:11 crc kubenswrapper[4946]: E1204 15:27:11.806401 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28bf40102534f81985901135f1835b1d4b18b51714666805916c00cf279864e9\": container with ID starting with 28bf40102534f81985901135f1835b1d4b18b51714666805916c00cf279864e9 not found: ID does not exist" containerID="28bf40102534f81985901135f1835b1d4b18b51714666805916c00cf279864e9"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.806479 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28bf40102534f81985901135f1835b1d4b18b51714666805916c00cf279864e9"} err="failed to get container status \"28bf40102534f81985901135f1835b1d4b18b51714666805916c00cf279864e9\": rpc error: code = NotFound desc = could not find container \"28bf40102534f81985901135f1835b1d4b18b51714666805916c00cf279864e9\": container with ID starting with 28bf40102534f81985901135f1835b1d4b18b51714666805916c00cf279864e9 not found: ID does not exist"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.806526 4946 scope.go:117] "RemoveContainer" containerID="9d6fc0c64723a1e8d85f6558cf43178e790d21391b0bc85a16e7398e57ed5741"
Dec 04 15:27:11 crc kubenswrapper[4946]: E1204 15:27:11.806900 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d6fc0c64723a1e8d85f6558cf43178e790d21391b0bc85a16e7398e57ed5741\": container with ID starting with 9d6fc0c64723a1e8d85f6558cf43178e790d21391b0bc85a16e7398e57ed5741 not found: ID does not exist" containerID="9d6fc0c64723a1e8d85f6558cf43178e790d21391b0bc85a16e7398e57ed5741"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.806930 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d6fc0c64723a1e8d85f6558cf43178e790d21391b0bc85a16e7398e57ed5741"} err="failed to get container status \"9d6fc0c64723a1e8d85f6558cf43178e790d21391b0bc85a16e7398e57ed5741\": rpc error: code = NotFound desc = could not find container \"9d6fc0c64723a1e8d85f6558cf43178e790d21391b0bc85a16e7398e57ed5741\": container with ID starting with 9d6fc0c64723a1e8d85f6558cf43178e790d21391b0bc85a16e7398e57ed5741 not found: ID does not exist"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.883363 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-scripts\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.883856 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f81e4d9b-578a-4656-ac39-a36738ae194f-log-httpd\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.883921 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.884071 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.884298 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-config-data\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.884335 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f81e4d9b-578a-4656-ac39-a36738ae194f-run-httpd\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.884383 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rcp6\" (UniqueName: \"kubernetes.io/projected/f81e4d9b-578a-4656-ac39-a36738ae194f-kube-api-access-6rcp6\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.986200 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.986340 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-config-data\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.986374 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f81e4d9b-578a-4656-ac39-a36738ae194f-run-httpd\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.986417 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rcp6\" (UniqueName: \"kubernetes.io/projected/f81e4d9b-578a-4656-ac39-a36738ae194f-kube-api-access-6rcp6\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.986462 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-scripts\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.986499 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f81e4d9b-578a-4656-ac39-a36738ae194f-log-httpd\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.986549 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.987493 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f81e4d9b-578a-4656-ac39-a36738ae194f-run-httpd\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.987538 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f81e4d9b-578a-4656-ac39-a36738ae194f-log-httpd\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.990868 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.991307 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:11 crc kubenswrapper[4946]: I1204 15:27:11.991605 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-scripts\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:12 crc kubenswrapper[4946]: I1204 15:27:12.007451 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-config-data\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:12 crc kubenswrapper[4946]: I1204 15:27:12.014623 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rcp6\" (UniqueName: \"kubernetes.io/projected/f81e4d9b-578a-4656-ac39-a36738ae194f-kube-api-access-6rcp6\") pod \"ceilometer-0\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " pod="openstack/ceilometer-0"
Dec 04 15:27:12 crc kubenswrapper[4946]: I1204 15:27:12.051301 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 04 15:27:12 crc kubenswrapper[4946]: I1204 15:27:12.909926 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 04 15:27:12 crc kubenswrapper[4946]: W1204 15:27:12.911530 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf81e4d9b_578a_4656_ac39_a36738ae194f.slice/crio-511ee1cca42490ec636ab9a670ab987198efa3a13633c0752eabf77bbdda769c WatchSource:0}: Error finding container 511ee1cca42490ec636ab9a670ab987198efa3a13633c0752eabf77bbdda769c: Status 404 returned error can't find the container with id 511ee1cca42490ec636ab9a670ab987198efa3a13633c0752eabf77bbdda769c
Dec 04 15:27:13 crc kubenswrapper[4946]: I1204 15:27:13.472556 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e74405d1-7bee-4ce0-ae1e-982383f8a280" path="/var/lib/kubelet/pods/e74405d1-7bee-4ce0-ae1e-982383f8a280/volumes"
Dec 04 15:27:13 crc kubenswrapper[4946]: I1204 15:27:13.653960 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f81e4d9b-578a-4656-ac39-a36738ae194f","Type":"ContainerStarted","Data":"511ee1cca42490ec636ab9a670ab987198efa3a13633c0752eabf77bbdda769c"}
Dec 04 15:27:14 crc kubenswrapper[4946]: I1204 15:27:14.667739 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f81e4d9b-578a-4656-ac39-a36738ae194f","Type":"ContainerStarted","Data":"6f2514cd25ee648dce8d435a0f300ecbf3949eaa715a9ae7aeed889d136d53dd"}
Dec 04 15:27:16 crc kubenswrapper[4946]: I1204 15:27:16.697901 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f81e4d9b-578a-4656-ac39-a36738ae194f","Type":"ContainerStarted","Data":"1b2e09a2b75199b84e8b68ab565afce2c1bd6b0301f2fc0e34280099347a4b46"}
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.048464 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.528711 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-lzwkv"]
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.530484 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-lzwkv"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.543802 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.544034 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.547961 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-lzwkv"]
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.711845 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrkl8\" (UniqueName: \"kubernetes.io/projected/e47ae595-d6e7-4eec-828f-98755b3d08b5-kube-api-access-qrkl8\") pod \"nova-cell0-cell-mapping-lzwkv\" (UID: \"e47ae595-d6e7-4eec-828f-98755b3d08b5\") " pod="openstack/nova-cell0-cell-mapping-lzwkv"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.712083 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e47ae595-d6e7-4eec-828f-98755b3d08b5-config-data\") pod \"nova-cell0-cell-mapping-lzwkv\" (UID: \"e47ae595-d6e7-4eec-828f-98755b3d08b5\") " pod="openstack/nova-cell0-cell-mapping-lzwkv"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.712181 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e47ae595-d6e7-4eec-828f-98755b3d08b5-scripts\") pod \"nova-cell0-cell-mapping-lzwkv\" (UID: \"e47ae595-d6e7-4eec-828f-98755b3d08b5\") " pod="openstack/nova-cell0-cell-mapping-lzwkv"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.712212 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e47ae595-d6e7-4eec-828f-98755b3d08b5-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-lzwkv\" (UID: \"e47ae595-d6e7-4eec-828f-98755b3d08b5\") " pod="openstack/nova-cell0-cell-mapping-lzwkv"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.739533 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f81e4d9b-578a-4656-ac39-a36738ae194f","Type":"ContainerStarted","Data":"074575e82961cbf82b2a18524211c93c9f115c3ee2fcf3818684c001b7b49357"}
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.784678 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.786700 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.817650 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.819571 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.821590 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e47ae595-d6e7-4eec-828f-98755b3d08b5-config-data\") pod \"nova-cell0-cell-mapping-lzwkv\" (UID: \"e47ae595-d6e7-4eec-828f-98755b3d08b5\") " pod="openstack/nova-cell0-cell-mapping-lzwkv"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.821673 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e47ae595-d6e7-4eec-828f-98755b3d08b5-scripts\") pod \"nova-cell0-cell-mapping-lzwkv\" (UID: \"e47ae595-d6e7-4eec-828f-98755b3d08b5\") " pod="openstack/nova-cell0-cell-mapping-lzwkv"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.821708 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e47ae595-d6e7-4eec-828f-98755b3d08b5-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-lzwkv\" (UID: \"e47ae595-d6e7-4eec-828f-98755b3d08b5\") " pod="openstack/nova-cell0-cell-mapping-lzwkv"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.821838 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrkl8\" (UniqueName: \"kubernetes.io/projected/e47ae595-d6e7-4eec-828f-98755b3d08b5-kube-api-access-qrkl8\") pod \"nova-cell0-cell-mapping-lzwkv\" (UID: \"e47ae595-d6e7-4eec-828f-98755b3d08b5\") " pod="openstack/nova-cell0-cell-mapping-lzwkv"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.830405 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e47ae595-d6e7-4eec-828f-98755b3d08b5-scripts\") pod \"nova-cell0-cell-mapping-lzwkv\" (UID: \"e47ae595-d6e7-4eec-828f-98755b3d08b5\") " pod="openstack/nova-cell0-cell-mapping-lzwkv"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.856566 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e47ae595-d6e7-4eec-828f-98755b3d08b5-config-data\") pod \"nova-cell0-cell-mapping-lzwkv\" (UID: \"e47ae595-d6e7-4eec-828f-98755b3d08b5\") " pod="openstack/nova-cell0-cell-mapping-lzwkv"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.874732 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e47ae595-d6e7-4eec-828f-98755b3d08b5-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-lzwkv\" (UID: \"e47ae595-d6e7-4eec-828f-98755b3d08b5\") " pod="openstack/nova-cell0-cell-mapping-lzwkv"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.919471 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrkl8\" (UniqueName: \"kubernetes.io/projected/e47ae595-d6e7-4eec-828f-98755b3d08b5-kube-api-access-qrkl8\") pod \"nova-cell0-cell-mapping-lzwkv\" (UID: \"e47ae595-d6e7-4eec-828f-98755b3d08b5\") " pod="openstack/nova-cell0-cell-mapping-lzwkv"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.936174 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pvml\" (UniqueName: \"kubernetes.io/projected/5bf2cf28-d180-411d-b617-6033ce853019-kube-api-access-5pvml\") pod \"nova-scheduler-0\" (UID: \"5bf2cf28-d180-411d-b617-6033ce853019\") " pod="openstack/nova-scheduler-0"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.936334 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bf2cf28-d180-411d-b617-6033ce853019-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5bf2cf28-d180-411d-b617-6033ce853019\") " pod="openstack/nova-scheduler-0"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.936468 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bf2cf28-d180-411d-b617-6033ce853019-config-data\") pod \"nova-scheduler-0\" (UID: \"5bf2cf28-d180-411d-b617-6033ce853019\") " pod="openstack/nova-scheduler-0"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.957295 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.961256 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 04 15:27:17 crc kubenswrapper[4946]: I1204 15:27:17.969367 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.019005 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.055491 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pvml\" (UniqueName: \"kubernetes.io/projected/5bf2cf28-d180-411d-b617-6033ce853019-kube-api-access-5pvml\") pod \"nova-scheduler-0\" (UID: \"5bf2cf28-d180-411d-b617-6033ce853019\") " pod="openstack/nova-scheduler-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.055567 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bf2cf28-d180-411d-b617-6033ce853019-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5bf2cf28-d180-411d-b617-6033ce853019\") " pod="openstack/nova-scheduler-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.055628 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bf2cf28-d180-411d-b617-6033ce853019-config-data\") pod \"nova-scheduler-0\" (UID: \"5bf2cf28-d180-411d-b617-6033ce853019\") " pod="openstack/nova-scheduler-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.062344 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bf2cf28-d180-411d-b617-6033ce853019-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5bf2cf28-d180-411d-b617-6033ce853019\") " pod="openstack/nova-scheduler-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.076485 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bf2cf28-d180-411d-b617-6033ce853019-config-data\") pod \"nova-scheduler-0\" (UID: \"5bf2cf28-d180-411d-b617-6033ce853019\") " pod="openstack/nova-scheduler-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.086186 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.088887 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.110687 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.120145 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pvml\" (UniqueName: \"kubernetes.io/projected/5bf2cf28-d180-411d-b617-6033ce853019-kube-api-access-5pvml\") pod \"nova-scheduler-0\" (UID: \"5bf2cf28-d180-411d-b617-6033ce853019\") " pod="openstack/nova-scheduler-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.146738 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.162081 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7x2z2\" (UniqueName: \"kubernetes.io/projected/6a5e7e17-c32e-4713-ba05-de91486be60d-kube-api-access-7x2z2\") pod \"nova-api-0\" (UID: \"6a5e7e17-c32e-4713-ba05-de91486be60d\") " pod="openstack/nova-api-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.162133 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b01d659d-7246-4db4-bce4-1d81adc7bb5b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b01d659d-7246-4db4-bce4-1d81adc7bb5b\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.162231 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b01d659d-7246-4db4-bce4-1d81adc7bb5b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b01d659d-7246-4db4-bce4-1d81adc7bb5b\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.162254 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a5e7e17-c32e-4713-ba05-de91486be60d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6a5e7e17-c32e-4713-ba05-de91486be60d\") " pod="openstack/nova-api-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.162292 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a5e7e17-c32e-4713-ba05-de91486be60d-config-data\") pod \"nova-api-0\" (UID: \"6a5e7e17-c32e-4713-ba05-de91486be60d\") " pod="openstack/nova-api-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.162313 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtzp8\" (UniqueName: \"kubernetes.io/projected/b01d659d-7246-4db4-bce4-1d81adc7bb5b-kube-api-access-jtzp8\") pod \"nova-cell1-novncproxy-0\" (UID: \"b01d659d-7246-4db4-bce4-1d81adc7bb5b\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.162340 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a5e7e17-c32e-4713-ba05-de91486be60d-logs\") pod \"nova-api-0\" (UID: \"6a5e7e17-c32e-4713-ba05-de91486be60d\") " pod="openstack/nova-api-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.163941 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-lzwkv"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.179308 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.185477 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.190763 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.191895 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.233336 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78cd565959-rqn9f"]
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.235600 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78cd565959-rqn9f"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.245611 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-rqn9f"]
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.267930 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-dns-swift-storage-0\") pod \"dnsmasq-dns-78cd565959-rqn9f\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " pod="openstack/dnsmasq-dns-78cd565959-rqn9f"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.268014 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b01d659d-7246-4db4-bce4-1d81adc7bb5b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b01d659d-7246-4db4-bce4-1d81adc7bb5b\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.268038 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a5e7e17-c32e-4713-ba05-de91486be60d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6a5e7e17-c32e-4713-ba05-de91486be60d\") " pod="openstack/nova-api-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.268058 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-ovsdbserver-sb\") pod \"dnsmasq-dns-78cd565959-rqn9f\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " pod="openstack/dnsmasq-dns-78cd565959-rqn9f"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.268077 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2446178b-93ab-4a7f-aa76-2ead2703bcc8-config-data\") pod \"nova-metadata-0\" (UID: \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\") " pod="openstack/nova-metadata-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.268107 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a5e7e17-c32e-4713-ba05-de91486be60d-config-data\") pod \"nova-api-0\" (UID: \"6a5e7e17-c32e-4713-ba05-de91486be60d\") " pod="openstack/nova-api-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.268144 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-dns-svc\") pod \"dnsmasq-dns-78cd565959-rqn9f\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " pod="openstack/dnsmasq-dns-78cd565959-rqn9f"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.268162 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtzp8\" (UniqueName: \"kubernetes.io/projected/b01d659d-7246-4db4-bce4-1d81adc7bb5b-kube-api-access-jtzp8\") pod \"nova-cell1-novncproxy-0\" (UID: \"b01d659d-7246-4db4-bce4-1d81adc7bb5b\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.268192 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msg5c\" (UniqueName: \"kubernetes.io/projected/2446178b-93ab-4a7f-aa76-2ead2703bcc8-kube-api-access-msg5c\") pod \"nova-metadata-0\" (UID: \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\") " pod="openstack/nova-metadata-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.268211 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a5e7e17-c32e-4713-ba05-de91486be60d-logs\") pod \"nova-api-0\" (UID: \"6a5e7e17-c32e-4713-ba05-de91486be60d\") " pod="openstack/nova-api-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.268231 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-ovsdbserver-nb\") pod \"dnsmasq-dns-78cd565959-rqn9f\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " pod="openstack/dnsmasq-dns-78cd565959-rqn9f"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.268248 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2446178b-93ab-4a7f-aa76-2ead2703bcc8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\") " pod="openstack/nova-metadata-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.268290 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7x2z2\" (UniqueName: \"kubernetes.io/projected/6a5e7e17-c32e-4713-ba05-de91486be60d-kube-api-access-7x2z2\") pod \"nova-api-0\" (UID: \"6a5e7e17-c32e-4713-ba05-de91486be60d\") " pod="openstack/nova-api-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.268307 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b01d659d-7246-4db4-bce4-1d81adc7bb5b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b01d659d-7246-4db4-bce4-1d81adc7bb5b\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.268332 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-config\") pod \"dnsmasq-dns-78cd565959-rqn9f\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " pod="openstack/dnsmasq-dns-78cd565959-rqn9f"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.268352 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2446178b-93ab-4a7f-aa76-2ead2703bcc8-logs\") pod \"nova-metadata-0\" (UID: \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\") " pod="openstack/nova-metadata-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.268386 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvxlr\" (UniqueName: \"kubernetes.io/projected/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-kube-api-access-zvxlr\") pod \"dnsmasq-dns-78cd565959-rqn9f\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " pod="openstack/dnsmasq-dns-78cd565959-rqn9f"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.274137 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a5e7e17-c32e-4713-ba05-de91486be60d-logs\") pod \"nova-api-0\" (UID: \"6a5e7e17-c32e-4713-ba05-de91486be60d\") " pod="openstack/nova-api-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.275357 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a5e7e17-c32e-4713-ba05-de91486be60d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6a5e7e17-c32e-4713-ba05-de91486be60d\") " pod="openstack/nova-api-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.280228 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a5e7e17-c32e-4713-ba05-de91486be60d-config-data\") pod \"nova-api-0\" (UID: \"6a5e7e17-c32e-4713-ba05-de91486be60d\") " pod="openstack/nova-api-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.295325 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b01d659d-7246-4db4-bce4-1d81adc7bb5b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b01d659d-7246-4db4-bce4-1d81adc7bb5b\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.297338 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b01d659d-7246-4db4-bce4-1d81adc7bb5b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b01d659d-7246-4db4-bce4-1d81adc7bb5b\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.300403 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtzp8\" (UniqueName: \"kubernetes.io/projected/b01d659d-7246-4db4-bce4-1d81adc7bb5b-kube-api-access-jtzp8\") pod \"nova-cell1-novncproxy-0\" (UID: \"b01d659d-7246-4db4-bce4-1d81adc7bb5b\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.301909 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7x2z2\" (UniqueName: \"kubernetes.io/projected/6a5e7e17-c32e-4713-ba05-de91486be60d-kube-api-access-7x2z2\") pod \"nova-api-0\" (UID: \"6a5e7e17-c32e-4713-ba05-de91486be60d\") " pod="openstack/nova-api-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.326928 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.350746 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.371183 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-dns-svc\") pod \"dnsmasq-dns-78cd565959-rqn9f\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " pod="openstack/dnsmasq-dns-78cd565959-rqn9f"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.371800 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msg5c\" (UniqueName: \"kubernetes.io/projected/2446178b-93ab-4a7f-aa76-2ead2703bcc8-kube-api-access-msg5c\") pod \"nova-metadata-0\" (UID: \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\") " pod="openstack/nova-metadata-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.371835 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-ovsdbserver-nb\") pod \"dnsmasq-dns-78cd565959-rqn9f\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " pod="openstack/dnsmasq-dns-78cd565959-rqn9f"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.371881 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2446178b-93ab-4a7f-aa76-2ead2703bcc8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\") " pod="openstack/nova-metadata-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.371983 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-config\") pod \"dnsmasq-dns-78cd565959-rqn9f\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " pod="openstack/dnsmasq-dns-78cd565959-rqn9f"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.372007 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2446178b-93ab-4a7f-aa76-2ead2703bcc8-logs\") pod \"nova-metadata-0\" (UID: \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\") " pod="openstack/nova-metadata-0"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.372081 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvxlr\" (UniqueName: \"kubernetes.io/projected/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-kube-api-access-zvxlr\") pod \"dnsmasq-dns-78cd565959-rqn9f\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " pod="openstack/dnsmasq-dns-78cd565959-rqn9f"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.372206 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-dns-swift-storage-0\") pod \"dnsmasq-dns-78cd565959-rqn9f\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " pod="openstack/dnsmasq-dns-78cd565959-rqn9f"
Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.372357 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-ovsdbserver-sb\") pod \"dnsmasq-dns-78cd565959-rqn9f\" (UID: 
\"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " pod="openstack/dnsmasq-dns-78cd565959-rqn9f" Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.372395 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2446178b-93ab-4a7f-aa76-2ead2703bcc8-config-data\") pod \"nova-metadata-0\" (UID: \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\") " pod="openstack/nova-metadata-0" Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.374194 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-config\") pod \"dnsmasq-dns-78cd565959-rqn9f\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " pod="openstack/dnsmasq-dns-78cd565959-rqn9f" Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.374243 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2446178b-93ab-4a7f-aa76-2ead2703bcc8-logs\") pod \"nova-metadata-0\" (UID: \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\") " pod="openstack/nova-metadata-0" Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.375424 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-dns-svc\") pod \"dnsmasq-dns-78cd565959-rqn9f\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " pod="openstack/dnsmasq-dns-78cd565959-rqn9f" Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.379896 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2446178b-93ab-4a7f-aa76-2ead2703bcc8-config-data\") pod \"nova-metadata-0\" (UID: \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\") " pod="openstack/nova-metadata-0" Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.380852 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-ovsdbserver-nb\") pod \"dnsmasq-dns-78cd565959-rqn9f\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " pod="openstack/dnsmasq-dns-78cd565959-rqn9f" Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.383732 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-ovsdbserver-sb\") pod \"dnsmasq-dns-78cd565959-rqn9f\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " pod="openstack/dnsmasq-dns-78cd565959-rqn9f" Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.383934 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-dns-swift-storage-0\") pod \"dnsmasq-dns-78cd565959-rqn9f\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " pod="openstack/dnsmasq-dns-78cd565959-rqn9f" Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.391613 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2446178b-93ab-4a7f-aa76-2ead2703bcc8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\") " pod="openstack/nova-metadata-0" Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.397689 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvxlr\" (UniqueName: 
\"kubernetes.io/projected/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-kube-api-access-zvxlr\") pod \"dnsmasq-dns-78cd565959-rqn9f\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " pod="openstack/dnsmasq-dns-78cd565959-rqn9f" Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.402815 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msg5c\" (UniqueName: \"kubernetes.io/projected/2446178b-93ab-4a7f-aa76-2ead2703bcc8-kube-api-access-msg5c\") pod \"nova-metadata-0\" (UID: \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\") " pod="openstack/nova-metadata-0" Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.503601 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.549358 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.581711 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78cd565959-rqn9f" Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.802801 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f81e4d9b-578a-4656-ac39-a36738ae194f","Type":"ContainerStarted","Data":"f523307d41279f5eac58293824890658feb3ea5f6f534e3affe5e0962b8c2267"} Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.803339 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.844352 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-lzwkv"] Dec 04 15:27:18 crc kubenswrapper[4946]: I1204 15:27:18.847734 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.161074077 podStartE2EDuration="7.847708733s" podCreationTimestamp="2025-12-04 15:27:11 +0000 UTC" firstStartedPulling="2025-12-04 15:27:12.914700944 +0000 UTC m=+1483.800744585" lastFinishedPulling="2025-12-04 15:27:17.6013356 +0000 UTC m=+1488.487379241" observedRunningTime="2025-12-04 15:27:18.835931126 +0000 UTC m=+1489.721974767" watchObservedRunningTime="2025-12-04 15:27:18.847708733 +0000 UTC m=+1489.733752374" Dec 04 15:27:18 crc kubenswrapper[4946]: W1204 15:27:18.902340 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode47ae595_d6e7_4eec_828f_98755b3d08b5.slice/crio-298c430cd743a2958c1673a5c9e62b264831ba89183eb307490be9ad1141b241 WatchSource:0}: Error finding container 298c430cd743a2958c1673a5c9e62b264831ba89183eb307490be9ad1141b241: Status 404 returned error can't find the container with id 298c430cd743a2958c1673a5c9e62b264831ba89183eb307490be9ad1141b241 Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.392040 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.446212 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.538162 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hnrx8"] Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.539893 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-hnrx8" Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.543813 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.544055 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.562595 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hnrx8"] Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.721942 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baed508c-c3b8-40d5-a421-5121f9e3f8f5-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-hnrx8\" (UID: \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\") " pod="openstack/nova-cell1-conductor-db-sync-hnrx8" Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.722076 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jp8hc\" (UniqueName: \"kubernetes.io/projected/baed508c-c3b8-40d5-a421-5121f9e3f8f5-kube-api-access-jp8hc\") pod \"nova-cell1-conductor-db-sync-hnrx8\" (UID: \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\") " pod="openstack/nova-cell1-conductor-db-sync-hnrx8" Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.722321 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/baed508c-c3b8-40d5-a421-5121f9e3f8f5-scripts\") pod \"nova-cell1-conductor-db-sync-hnrx8\" (UID: \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\") " pod="openstack/nova-cell1-conductor-db-sync-hnrx8" Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.722411 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baed508c-c3b8-40d5-a421-5121f9e3f8f5-config-data\") pod \"nova-cell1-conductor-db-sync-hnrx8\" (UID: \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\") " pod="openstack/nova-cell1-conductor-db-sync-hnrx8" Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.828714 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baed508c-c3b8-40d5-a421-5121f9e3f8f5-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-hnrx8\" (UID: \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\") " pod="openstack/nova-cell1-conductor-db-sync-hnrx8" Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.829258 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jp8hc\" (UniqueName: \"kubernetes.io/projected/baed508c-c3b8-40d5-a421-5121f9e3f8f5-kube-api-access-jp8hc\") pod \"nova-cell1-conductor-db-sync-hnrx8\" (UID: \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\") " pod="openstack/nova-cell1-conductor-db-sync-hnrx8" Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.829696 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/baed508c-c3b8-40d5-a421-5121f9e3f8f5-scripts\") pod \"nova-cell1-conductor-db-sync-hnrx8\" (UID: \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\") " pod="openstack/nova-cell1-conductor-db-sync-hnrx8" Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.829848 4946 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baed508c-c3b8-40d5-a421-5121f9e3f8f5-config-data\") pod \"nova-cell1-conductor-db-sync-hnrx8\" (UID: \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\") " pod="openstack/nova-cell1-conductor-db-sync-hnrx8" Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.847597 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baed508c-c3b8-40d5-a421-5121f9e3f8f5-config-data\") pod \"nova-cell1-conductor-db-sync-hnrx8\" (UID: \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\") " pod="openstack/nova-cell1-conductor-db-sync-hnrx8" Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.850203 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baed508c-c3b8-40d5-a421-5121f9e3f8f5-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-hnrx8\" (UID: \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\") " pod="openstack/nova-cell1-conductor-db-sync-hnrx8" Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.850178 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/baed508c-c3b8-40d5-a421-5121f9e3f8f5-scripts\") pod \"nova-cell1-conductor-db-sync-hnrx8\" (UID: \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\") " pod="openstack/nova-cell1-conductor-db-sync-hnrx8" Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.861054 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jp8hc\" (UniqueName: \"kubernetes.io/projected/baed508c-c3b8-40d5-a421-5121f9e3f8f5-kube-api-access-jp8hc\") pod \"nova-cell1-conductor-db-sync-hnrx8\" (UID: \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\") " pod="openstack/nova-cell1-conductor-db-sync-hnrx8" Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.880665 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-hnrx8" Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.882944 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-lzwkv" event={"ID":"e47ae595-d6e7-4eec-828f-98755b3d08b5","Type":"ContainerStarted","Data":"bfcabc4a12f7727b6260f34c03e58fa3866948951b8ae0ab6045d59e5bd0c782"} Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.883002 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-lzwkv" event={"ID":"e47ae595-d6e7-4eec-828f-98755b3d08b5","Type":"ContainerStarted","Data":"298c430cd743a2958c1673a5c9e62b264831ba89183eb307490be9ad1141b241"} Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.887430 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-rqn9f"] Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.888395 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5bf2cf28-d180-411d-b617-6033ce853019","Type":"ContainerStarted","Data":"5da3dadb1777d2465668f2d317e8a79ed045b3b58a5895ea885a6ade07c00ce0"} Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.922453 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6a5e7e17-c32e-4713-ba05-de91486be60d","Type":"ContainerStarted","Data":"b1e53d64686ea16b987a72a5fe6f7b9c22a78f5130c5808b5e3f266147027854"} Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.938501 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.967210 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 04 15:27:19 crc kubenswrapper[4946]: I1204 15:27:19.991103 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-lzwkv" podStartSLOduration=2.991070079 podStartE2EDuration="2.991070079s" podCreationTimestamp="2025-12-04 15:27:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:27:19.900603581 +0000 UTC m=+1490.786647222" watchObservedRunningTime="2025-12-04 15:27:19.991070079 +0000 UTC m=+1490.877113720" Dec 04 15:27:20 crc kubenswrapper[4946]: I1204 15:27:20.768416 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hnrx8"] Dec 04 15:27:20 crc kubenswrapper[4946]: I1204 15:27:20.962808 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b01d659d-7246-4db4-bce4-1d81adc7bb5b","Type":"ContainerStarted","Data":"e4d6f1789d0bad302dfd8fbd50c34dfaef4ddbf2900f93051acf09a35ee735e5"} Dec 04 15:27:20 crc kubenswrapper[4946]: I1204 15:27:20.978378 4946 generic.go:334] "Generic (PLEG): container finished" podID="375837a2-49ae-4f0b-bc7c-a1ba198a8d14" containerID="8c1cd2491db148fde07b36f923c557468a263d10af3e79c9f9f6333e1f3cc6a5" exitCode=0 Dec 04 15:27:20 crc kubenswrapper[4946]: I1204 15:27:20.978483 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-rqn9f" event={"ID":"375837a2-49ae-4f0b-bc7c-a1ba198a8d14","Type":"ContainerDied","Data":"8c1cd2491db148fde07b36f923c557468a263d10af3e79c9f9f6333e1f3cc6a5"} Dec 04 15:27:20 crc kubenswrapper[4946]: I1204 15:27:20.978510 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-78cd565959-rqn9f" event={"ID":"375837a2-49ae-4f0b-bc7c-a1ba198a8d14","Type":"ContainerStarted","Data":"34d6e0805680296c5d966aff3983d445b4e8b85a0e2209373f9b36177170019d"} Dec 04 15:27:20 crc kubenswrapper[4946]: I1204 15:27:20.990883 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-hnrx8" event={"ID":"baed508c-c3b8-40d5-a421-5121f9e3f8f5","Type":"ContainerStarted","Data":"0371ef6b364242501e59057248be19c057256e892953427fd85ac465023b0d3b"} Dec 04 15:27:21 crc kubenswrapper[4946]: I1204 15:27:21.003730 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2446178b-93ab-4a7f-aa76-2ead2703bcc8","Type":"ContainerStarted","Data":"a3a49899ecb48a19f1419f75ae7e997c29b18f5530010406ff1e6dd29fa7d7c7"} Dec 04 15:27:22 crc kubenswrapper[4946]: I1204 15:27:22.031520 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-hnrx8" event={"ID":"baed508c-c3b8-40d5-a421-5121f9e3f8f5","Type":"ContainerStarted","Data":"cdc3517ae7b0fcce6b3536308c82edfd1c7053cd4782150c9009eb8cfe3a468e"} Dec 04 15:27:22 crc kubenswrapper[4946]: I1204 15:27:22.072799 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-hnrx8" podStartSLOduration=3.072773716 podStartE2EDuration="3.072773716s" podCreationTimestamp="2025-12-04 15:27:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:27:22.050984589 +0000 UTC m=+1492.937028240" watchObservedRunningTime="2025-12-04 15:27:22.072773716 +0000 UTC m=+1492.958817357" Dec 04 15:27:22 crc kubenswrapper[4946]: I1204 15:27:22.789727 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:27:22 crc kubenswrapper[4946]: I1204 15:27:22.810164 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 04 15:27:23 crc kubenswrapper[4946]: I1204 15:27:23.047941 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-rqn9f" event={"ID":"375837a2-49ae-4f0b-bc7c-a1ba198a8d14","Type":"ContainerStarted","Data":"51332b4897b78e204a71b8f8333578799325b34c33d0960e53dfa0bf25906353"} Dec 04 15:27:23 crc kubenswrapper[4946]: I1204 15:27:23.048400 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78cd565959-rqn9f" Dec 04 15:27:23 crc kubenswrapper[4946]: I1204 15:27:23.088383 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-78cd565959-rqn9f" podStartSLOduration=5.088294977 podStartE2EDuration="5.088294977s" podCreationTimestamp="2025-12-04 15:27:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:27:23.076741956 +0000 UTC m=+1493.962785607" watchObservedRunningTime="2025-12-04 15:27:23.088294977 +0000 UTC m=+1493.974338628" Dec 04 15:27:25 crc kubenswrapper[4946]: I1204 15:27:25.071450 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2446178b-93ab-4a7f-aa76-2ead2703bcc8","Type":"ContainerStarted","Data":"232c5e650cb3bdda2206d9c10a9362d77bbeaa51b35a7cf881205b8f3efa7803"} Dec 04 15:27:25 crc kubenswrapper[4946]: I1204 15:27:25.076509 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"b01d659d-7246-4db4-bce4-1d81adc7bb5b","Type":"ContainerStarted","Data":"ff23d4a30ecb5bdfbbd8cae6703210287ebb2d2a07eae5d5ba9b1883bdf642c3"} Dec 04 15:27:25 crc kubenswrapper[4946]: I1204 15:27:25.076638 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="b01d659d-7246-4db4-bce4-1d81adc7bb5b" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://ff23d4a30ecb5bdfbbd8cae6703210287ebb2d2a07eae5d5ba9b1883bdf642c3" gracePeriod=30 Dec 04 15:27:25 crc kubenswrapper[4946]: I1204 15:27:25.085846 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5bf2cf28-d180-411d-b617-6033ce853019","Type":"ContainerStarted","Data":"3c1960c87f567c41f866a8541566ff728069d1096d697b62f905e87c516d491b"} Dec 04 15:27:25 crc kubenswrapper[4946]: I1204 15:27:25.090075 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6a5e7e17-c32e-4713-ba05-de91486be60d","Type":"ContainerStarted","Data":"48d8d77524f46ff21bb850733a59702531d5dae490d1bb444868143d1aee0fc4"} Dec 04 15:27:25 crc kubenswrapper[4946]: I1204 15:27:25.109975 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.572163312 podStartE2EDuration="8.109947965s" podCreationTimestamp="2025-12-04 15:27:17 +0000 UTC" firstStartedPulling="2025-12-04 15:27:19.952780618 +0000 UTC m=+1490.838824249" lastFinishedPulling="2025-12-04 15:27:24.490565261 +0000 UTC m=+1495.376608902" observedRunningTime="2025-12-04 15:27:25.098856976 +0000 UTC m=+1495.984900617" watchObservedRunningTime="2025-12-04 15:27:25.109947965 +0000 UTC m=+1495.995991606" Dec 04 15:27:25 crc kubenswrapper[4946]: I1204 15:27:25.129700 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.1060898 podStartE2EDuration="8.129677687s" podCreationTimestamp="2025-12-04 15:27:17 +0000 UTC" firstStartedPulling="2025-12-04 15:27:19.465026021 +0000 UTC m=+1490.351069662" lastFinishedPulling="2025-12-04 15:27:24.488613908 +0000 UTC m=+1495.374657549" observedRunningTime="2025-12-04 15:27:25.129137042 +0000 UTC m=+1496.015180673" watchObservedRunningTime="2025-12-04 15:27:25.129677687 +0000 UTC m=+1496.015721318" Dec 04 15:27:26 crc kubenswrapper[4946]: I1204 15:27:26.109516 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2446178b-93ab-4a7f-aa76-2ead2703bcc8","Type":"ContainerStarted","Data":"597c87a0945f0c996d835673cd0b24d5d774da50f6e9047e9181d5133314acf4"} Dec 04 15:27:26 crc kubenswrapper[4946]: I1204 15:27:26.109697 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="2446178b-93ab-4a7f-aa76-2ead2703bcc8" containerName="nova-metadata-log" containerID="cri-o://232c5e650cb3bdda2206d9c10a9362d77bbeaa51b35a7cf881205b8f3efa7803" gracePeriod=30 Dec 04 15:27:26 crc kubenswrapper[4946]: I1204 15:27:26.109812 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="2446178b-93ab-4a7f-aa76-2ead2703bcc8" containerName="nova-metadata-metadata" containerID="cri-o://597c87a0945f0c996d835673cd0b24d5d774da50f6e9047e9181d5133314acf4" gracePeriod=30 Dec 04 15:27:26 crc kubenswrapper[4946]: I1204 15:27:26.114711 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"6a5e7e17-c32e-4713-ba05-de91486be60d","Type":"ContainerStarted","Data":"bb4424e4af8ae228360eca8ec81e8deb2bd2964c02861942ca904a63e17ed685"} Dec 04 15:27:26 crc kubenswrapper[4946]: I1204 15:27:26.142349 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=4.5608610590000005 podStartE2EDuration="9.14232984s" podCreationTimestamp="2025-12-04 15:27:17 +0000 UTC" firstStartedPulling="2025-12-04 15:27:19.909494171 +0000 UTC m=+1490.795537812" lastFinishedPulling="2025-12-04 15:27:24.490962952 +0000 UTC m=+1495.377006593" observedRunningTime="2025-12-04 15:27:26.137066718 +0000 UTC m=+1497.023110359" watchObservedRunningTime="2025-12-04 15:27:26.14232984 +0000 UTC m=+1497.028373481" Dec 04 15:27:26 crc kubenswrapper[4946]: I1204 15:27:26.157128 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=4.100031999 podStartE2EDuration="9.157095108s" podCreationTimestamp="2025-12-04 15:27:17 +0000 UTC" firstStartedPulling="2025-12-04 15:27:19.436917904 +0000 UTC m=+1490.322961545" lastFinishedPulling="2025-12-04 15:27:24.493981013 +0000 UTC m=+1495.380024654" observedRunningTime="2025-12-04 15:27:26.15455427 +0000 UTC m=+1497.040597931" watchObservedRunningTime="2025-12-04 15:27:26.157095108 +0000 UTC m=+1497.043138749" Dec 04 15:27:27 crc kubenswrapper[4946]: I1204 15:27:27.134066 4946 generic.go:334] "Generic (PLEG): container finished" podID="2446178b-93ab-4a7f-aa76-2ead2703bcc8" containerID="597c87a0945f0c996d835673cd0b24d5d774da50f6e9047e9181d5133314acf4" exitCode=0 Dec 04 15:27:27 crc kubenswrapper[4946]: I1204 15:27:27.134743 4946 generic.go:334] "Generic (PLEG): container finished" podID="2446178b-93ab-4a7f-aa76-2ead2703bcc8" containerID="232c5e650cb3bdda2206d9c10a9362d77bbeaa51b35a7cf881205b8f3efa7803" exitCode=143 Dec 04 15:27:27 crc kubenswrapper[4946]: I1204 15:27:27.134600 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2446178b-93ab-4a7f-aa76-2ead2703bcc8","Type":"ContainerDied","Data":"597c87a0945f0c996d835673cd0b24d5d774da50f6e9047e9181d5133314acf4"} Dec 04 15:27:27 crc kubenswrapper[4946]: I1204 15:27:27.136018 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2446178b-93ab-4a7f-aa76-2ead2703bcc8","Type":"ContainerDied","Data":"232c5e650cb3bdda2206d9c10a9362d77bbeaa51b35a7cf881205b8f3efa7803"} Dec 04 15:27:27 crc kubenswrapper[4946]: I1204 15:27:27.356711 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 15:27:27 crc kubenswrapper[4946]: I1204 15:27:27.535139 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2446178b-93ab-4a7f-aa76-2ead2703bcc8-logs\") pod \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\" (UID: \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\") " Dec 04 15:27:27 crc kubenswrapper[4946]: I1204 15:27:27.535315 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2446178b-93ab-4a7f-aa76-2ead2703bcc8-combined-ca-bundle\") pod \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\" (UID: \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\") " Dec 04 15:27:27 crc kubenswrapper[4946]: I1204 15:27:27.535398 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2446178b-93ab-4a7f-aa76-2ead2703bcc8-config-data\") pod \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\" (UID: \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\") " Dec 04 15:27:27 crc kubenswrapper[4946]: I1204 15:27:27.535467 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msg5c\" (UniqueName: \"kubernetes.io/projected/2446178b-93ab-4a7f-aa76-2ead2703bcc8-kube-api-access-msg5c\") pod \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\" (UID: \"2446178b-93ab-4a7f-aa76-2ead2703bcc8\") " Dec 04 15:27:27 crc kubenswrapper[4946]: I1204 15:27:27.535642 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2446178b-93ab-4a7f-aa76-2ead2703bcc8-logs" (OuterVolumeSpecName: "logs") pod "2446178b-93ab-4a7f-aa76-2ead2703bcc8" (UID: "2446178b-93ab-4a7f-aa76-2ead2703bcc8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:27:27 crc kubenswrapper[4946]: I1204 15:27:27.537659 4946 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2446178b-93ab-4a7f-aa76-2ead2703bcc8-logs\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:27 crc kubenswrapper[4946]: I1204 15:27:27.541464 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2446178b-93ab-4a7f-aa76-2ead2703bcc8-kube-api-access-msg5c" (OuterVolumeSpecName: "kube-api-access-msg5c") pod "2446178b-93ab-4a7f-aa76-2ead2703bcc8" (UID: "2446178b-93ab-4a7f-aa76-2ead2703bcc8"). InnerVolumeSpecName "kube-api-access-msg5c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:27:27 crc kubenswrapper[4946]: I1204 15:27:27.570832 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2446178b-93ab-4a7f-aa76-2ead2703bcc8-config-data" (OuterVolumeSpecName: "config-data") pod "2446178b-93ab-4a7f-aa76-2ead2703bcc8" (UID: "2446178b-93ab-4a7f-aa76-2ead2703bcc8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:27 crc kubenswrapper[4946]: I1204 15:27:27.571100 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2446178b-93ab-4a7f-aa76-2ead2703bcc8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2446178b-93ab-4a7f-aa76-2ead2703bcc8" (UID: "2446178b-93ab-4a7f-aa76-2ead2703bcc8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:27 crc kubenswrapper[4946]: I1204 15:27:27.640296 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2446178b-93ab-4a7f-aa76-2ead2703bcc8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:27 crc kubenswrapper[4946]: I1204 15:27:27.640347 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2446178b-93ab-4a7f-aa76-2ead2703bcc8-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:27 crc kubenswrapper[4946]: I1204 15:27:27.640364 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msg5c\" (UniqueName: \"kubernetes.io/projected/2446178b-93ab-4a7f-aa76-2ead2703bcc8-kube-api-access-msg5c\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.148431 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2446178b-93ab-4a7f-aa76-2ead2703bcc8","Type":"ContainerDied","Data":"a3a49899ecb48a19f1419f75ae7e997c29b18f5530010406ff1e6dd29fa7d7c7"} Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.148490 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.148928 4946 scope.go:117] "RemoveContainer" containerID="597c87a0945f0c996d835673cd0b24d5d774da50f6e9047e9181d5133314acf4" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.186043 4946 scope.go:117] "RemoveContainer" containerID="232c5e650cb3bdda2206d9c10a9362d77bbeaa51b35a7cf881205b8f3efa7803" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.209040 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.223231 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.237808 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:27:28 crc kubenswrapper[4946]: E1204 15:27:28.238319 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2446178b-93ab-4a7f-aa76-2ead2703bcc8" containerName="nova-metadata-log" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.238342 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="2446178b-93ab-4a7f-aa76-2ead2703bcc8" containerName="nova-metadata-log" Dec 04 15:27:28 crc kubenswrapper[4946]: E1204 15:27:28.238359 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2446178b-93ab-4a7f-aa76-2ead2703bcc8" containerName="nova-metadata-metadata" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.238368 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="2446178b-93ab-4a7f-aa76-2ead2703bcc8" containerName="nova-metadata-metadata" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.238656 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="2446178b-93ab-4a7f-aa76-2ead2703bcc8" containerName="nova-metadata-metadata" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.238696 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="2446178b-93ab-4a7f-aa76-2ead2703bcc8" containerName="nova-metadata-log" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.240239 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.243884 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.247504 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.259578 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.328438 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.328534 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.351578 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.351659 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.354042 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " pod="openstack/nova-metadata-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.355082 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-logs\") pod \"nova-metadata-0\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " pod="openstack/nova-metadata-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.355252 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xn9f9\" (UniqueName: \"kubernetes.io/projected/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-kube-api-access-xn9f9\") pod \"nova-metadata-0\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " pod="openstack/nova-metadata-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.356344 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " pod="openstack/nova-metadata-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.356536 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-config-data\") pod \"nova-metadata-0\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " pod="openstack/nova-metadata-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.366683 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.458296 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xn9f9\" (UniqueName: 
\"kubernetes.io/projected/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-kube-api-access-xn9f9\") pod \"nova-metadata-0\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " pod="openstack/nova-metadata-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.458357 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " pod="openstack/nova-metadata-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.458425 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-config-data\") pod \"nova-metadata-0\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " pod="openstack/nova-metadata-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.458510 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " pod="openstack/nova-metadata-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.458604 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-logs\") pod \"nova-metadata-0\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " pod="openstack/nova-metadata-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.460744 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-logs\") pod \"nova-metadata-0\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " pod="openstack/nova-metadata-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.464800 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-config-data\") pod \"nova-metadata-0\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " pod="openstack/nova-metadata-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.464887 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " pod="openstack/nova-metadata-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.464888 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " pod="openstack/nova-metadata-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.482980 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xn9f9\" (UniqueName: \"kubernetes.io/projected/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-kube-api-access-xn9f9\") pod \"nova-metadata-0\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " pod="openstack/nova-metadata-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.505002 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.561599 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.585398 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-78cd565959-rqn9f" Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.689396 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-qrznx"] Dec 04 15:27:28 crc kubenswrapper[4946]: I1204 15:27:28.690322 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67bdc55879-qrznx" podUID="53c3c8fa-f2df-43db-aa5b-cbee4b29d487" containerName="dnsmasq-dns" containerID="cri-o://259e523792d0edb6d87809f5b03edf5a10c0001cd075ec27cde28084cbdf0eb6" gracePeriod=10 Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.162644 4946 generic.go:334] "Generic (PLEG): container finished" podID="e47ae595-d6e7-4eec-828f-98755b3d08b5" containerID="bfcabc4a12f7727b6260f34c03e58fa3866948951b8ae0ab6045d59e5bd0c782" exitCode=0 Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.163172 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-lzwkv" event={"ID":"e47ae595-d6e7-4eec-828f-98755b3d08b5","Type":"ContainerDied","Data":"bfcabc4a12f7727b6260f34c03e58fa3866948951b8ae0ab6045d59e5bd0c782"} Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.176212 4946 generic.go:334] "Generic (PLEG): container finished" podID="53c3c8fa-f2df-43db-aa5b-cbee4b29d487" containerID="259e523792d0edb6d87809f5b03edf5a10c0001cd075ec27cde28084cbdf0eb6" exitCode=0 Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.176333 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-qrznx" event={"ID":"53c3c8fa-f2df-43db-aa5b-cbee4b29d487","Type":"ContainerDied","Data":"259e523792d0edb6d87809f5b03edf5a10c0001cd075ec27cde28084cbdf0eb6"} Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.230954 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.235049 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.425049 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.436596 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6a5e7e17-c32e-4713-ba05-de91486be60d" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.211:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.436699 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6a5e7e17-c32e-4713-ba05-de91486be60d" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.211:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.504039 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2446178b-93ab-4a7f-aa76-2ead2703bcc8" path="/var/lib/kubelet/pods/2446178b-93ab-4a7f-aa76-2ead2703bcc8/volumes" Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.512308 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-ovsdbserver-sb\") pod \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.613325 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "53c3c8fa-f2df-43db-aa5b-cbee4b29d487" (UID: "53c3c8fa-f2df-43db-aa5b-cbee4b29d487"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.614535 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-dns-svc\") pod \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.614596 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-ovsdbserver-nb\") pod \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.614622 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-dns-swift-storage-0\") pod \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.614670 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p267c\" (UniqueName: \"kubernetes.io/projected/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-kube-api-access-p267c\") pod \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.614709 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-config\") pod \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\" (UID: \"53c3c8fa-f2df-43db-aa5b-cbee4b29d487\") " Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.616827 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.622608 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-kube-api-access-p267c" (OuterVolumeSpecName: "kube-api-access-p267c") pod "53c3c8fa-f2df-43db-aa5b-cbee4b29d487" (UID: "53c3c8fa-f2df-43db-aa5b-cbee4b29d487"). InnerVolumeSpecName "kube-api-access-p267c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.712223 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "53c3c8fa-f2df-43db-aa5b-cbee4b29d487" (UID: "53c3c8fa-f2df-43db-aa5b-cbee4b29d487"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.719178 4946 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.719221 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p267c\" (UniqueName: \"kubernetes.io/projected/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-kube-api-access-p267c\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.724991 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "53c3c8fa-f2df-43db-aa5b-cbee4b29d487" (UID: "53c3c8fa-f2df-43db-aa5b-cbee4b29d487"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.731720 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-config" (OuterVolumeSpecName: "config") pod "53c3c8fa-f2df-43db-aa5b-cbee4b29d487" (UID: "53c3c8fa-f2df-43db-aa5b-cbee4b29d487"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.745789 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "53c3c8fa-f2df-43db-aa5b-cbee4b29d487" (UID: "53c3c8fa-f2df-43db-aa5b-cbee4b29d487"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.821745 4946 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.821787 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:29 crc kubenswrapper[4946]: I1204 15:27:29.821798 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53c3c8fa-f2df-43db-aa5b-cbee4b29d487-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.193855 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cefeef36-2c7f-418f-bfbd-c8f56b74d79c","Type":"ContainerStarted","Data":"57740cd938e88f85b3b03c94cb13b5b79ce1b7634f1e563483f602093ba3cf31"} Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.193953 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cefeef36-2c7f-418f-bfbd-c8f56b74d79c","Type":"ContainerStarted","Data":"38cee4632d208925a36fb41de025e110e2f48338232d0b53281fceb9beffd01a"} Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.193967 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cefeef36-2c7f-418f-bfbd-c8f56b74d79c","Type":"ContainerStarted","Data":"7daeaaa03c778e01f72957b65cff9e76e6b7bb13c7427bd987579650242d37e8"} Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.201431 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67bdc55879-qrznx" Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.202310 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-qrznx" event={"ID":"53c3c8fa-f2df-43db-aa5b-cbee4b29d487","Type":"ContainerDied","Data":"80435d55841e2ee7f94f7c40376830feb7db0936044e3453df9c7bf9b1b98278"} Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.202395 4946 scope.go:117] "RemoveContainer" containerID="259e523792d0edb6d87809f5b03edf5a10c0001cd075ec27cde28084cbdf0eb6" Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.240366 4946 scope.go:117] "RemoveContainer" containerID="e6d6fd2823510114a11b4a940235e29694d57abdf7a92da214138f2fde5a4529" Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.270101 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.270074643 podStartE2EDuration="2.270074643s" podCreationTimestamp="2025-12-04 15:27:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:27:30.22694331 +0000 UTC m=+1501.112986951" watchObservedRunningTime="2025-12-04 15:27:30.270074643 +0000 UTC m=+1501.156118274" Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.272439 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-qrznx"] Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.282294 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-qrznx"] Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.705794 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-lzwkv" Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.842631 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrkl8\" (UniqueName: \"kubernetes.io/projected/e47ae595-d6e7-4eec-828f-98755b3d08b5-kube-api-access-qrkl8\") pod \"e47ae595-d6e7-4eec-828f-98755b3d08b5\" (UID: \"e47ae595-d6e7-4eec-828f-98755b3d08b5\") " Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.842899 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e47ae595-d6e7-4eec-828f-98755b3d08b5-scripts\") pod \"e47ae595-d6e7-4eec-828f-98755b3d08b5\" (UID: \"e47ae595-d6e7-4eec-828f-98755b3d08b5\") " Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.842960 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e47ae595-d6e7-4eec-828f-98755b3d08b5-config-data\") pod \"e47ae595-d6e7-4eec-828f-98755b3d08b5\" (UID: \"e47ae595-d6e7-4eec-828f-98755b3d08b5\") " Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.843057 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e47ae595-d6e7-4eec-828f-98755b3d08b5-combined-ca-bundle\") pod \"e47ae595-d6e7-4eec-828f-98755b3d08b5\" (UID: \"e47ae595-d6e7-4eec-828f-98755b3d08b5\") " Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.849417 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e47ae595-d6e7-4eec-828f-98755b3d08b5-scripts" (OuterVolumeSpecName: "scripts") pod "e47ae595-d6e7-4eec-828f-98755b3d08b5" (UID: "e47ae595-d6e7-4eec-828f-98755b3d08b5"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.870411 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e47ae595-d6e7-4eec-828f-98755b3d08b5-kube-api-access-qrkl8" (OuterVolumeSpecName: "kube-api-access-qrkl8") pod "e47ae595-d6e7-4eec-828f-98755b3d08b5" (UID: "e47ae595-d6e7-4eec-828f-98755b3d08b5"). InnerVolumeSpecName "kube-api-access-qrkl8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.884300 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e47ae595-d6e7-4eec-828f-98755b3d08b5-config-data" (OuterVolumeSpecName: "config-data") pod "e47ae595-d6e7-4eec-828f-98755b3d08b5" (UID: "e47ae595-d6e7-4eec-828f-98755b3d08b5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.889428 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e47ae595-d6e7-4eec-828f-98755b3d08b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e47ae595-d6e7-4eec-828f-98755b3d08b5" (UID: "e47ae595-d6e7-4eec-828f-98755b3d08b5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.956927 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrkl8\" (UniqueName: \"kubernetes.io/projected/e47ae595-d6e7-4eec-828f-98755b3d08b5-kube-api-access-qrkl8\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.957076 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e47ae595-d6e7-4eec-828f-98755b3d08b5-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.957092 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e47ae595-d6e7-4eec-828f-98755b3d08b5-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:30 crc kubenswrapper[4946]: I1204 15:27:30.957108 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e47ae595-d6e7-4eec-828f-98755b3d08b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:31 crc kubenswrapper[4946]: I1204 15:27:31.214019 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-lzwkv" event={"ID":"e47ae595-d6e7-4eec-828f-98755b3d08b5","Type":"ContainerDied","Data":"298c430cd743a2958c1673a5c9e62b264831ba89183eb307490be9ad1141b241"} Dec 04 15:27:31 crc kubenswrapper[4946]: I1204 15:27:31.214076 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="298c430cd743a2958c1673a5c9e62b264831ba89183eb307490be9ad1141b241" Dec 04 15:27:31 crc kubenswrapper[4946]: I1204 15:27:31.214095 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-lzwkv" Dec 04 15:27:31 crc kubenswrapper[4946]: I1204 15:27:31.216289 4946 generic.go:334] "Generic (PLEG): container finished" podID="baed508c-c3b8-40d5-a421-5121f9e3f8f5" containerID="cdc3517ae7b0fcce6b3536308c82edfd1c7053cd4782150c9009eb8cfe3a468e" exitCode=0 Dec 04 15:27:31 crc kubenswrapper[4946]: I1204 15:27:31.216358 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-hnrx8" event={"ID":"baed508c-c3b8-40d5-a421-5121f9e3f8f5","Type":"ContainerDied","Data":"cdc3517ae7b0fcce6b3536308c82edfd1c7053cd4782150c9009eb8cfe3a468e"} Dec 04 15:27:31 crc kubenswrapper[4946]: I1204 15:27:31.385344 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 04 15:27:31 crc kubenswrapper[4946]: I1204 15:27:31.385950 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6a5e7e17-c32e-4713-ba05-de91486be60d" containerName="nova-api-log" containerID="cri-o://48d8d77524f46ff21bb850733a59702531d5dae490d1bb444868143d1aee0fc4" gracePeriod=30 Dec 04 15:27:31 crc kubenswrapper[4946]: I1204 15:27:31.386008 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6a5e7e17-c32e-4713-ba05-de91486be60d" containerName="nova-api-api" containerID="cri-o://bb4424e4af8ae228360eca8ec81e8deb2bd2964c02861942ca904a63e17ed685" gracePeriod=30 Dec 04 15:27:31 crc kubenswrapper[4946]: I1204 15:27:31.400204 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 15:27:31 crc kubenswrapper[4946]: I1204 15:27:31.400474 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="5bf2cf28-d180-411d-b617-6033ce853019" containerName="nova-scheduler-scheduler" containerID="cri-o://3c1960c87f567c41f866a8541566ff728069d1096d697b62f905e87c516d491b" gracePeriod=30 Dec 04 15:27:31 crc kubenswrapper[4946]: I1204 15:27:31.442252 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:27:31 crc kubenswrapper[4946]: I1204 15:27:31.466135 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53c3c8fa-f2df-43db-aa5b-cbee4b29d487" path="/var/lib/kubelet/pods/53c3c8fa-f2df-43db-aa5b-cbee4b29d487/volumes" Dec 04 15:27:32 crc kubenswrapper[4946]: I1204 15:27:32.227831 4946 generic.go:334] "Generic (PLEG): container finished" podID="6a5e7e17-c32e-4713-ba05-de91486be60d" containerID="48d8d77524f46ff21bb850733a59702531d5dae490d1bb444868143d1aee0fc4" exitCode=143 Dec 04 15:27:32 crc kubenswrapper[4946]: I1204 15:27:32.227923 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6a5e7e17-c32e-4713-ba05-de91486be60d","Type":"ContainerDied","Data":"48d8d77524f46ff21bb850733a59702531d5dae490d1bb444868143d1aee0fc4"} Dec 04 15:27:32 crc kubenswrapper[4946]: I1204 15:27:32.228485 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="cefeef36-2c7f-418f-bfbd-c8f56b74d79c" containerName="nova-metadata-log" containerID="cri-o://38cee4632d208925a36fb41de025e110e2f48338232d0b53281fceb9beffd01a" gracePeriod=30 Dec 04 15:27:32 crc kubenswrapper[4946]: I1204 15:27:32.229026 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="cefeef36-2c7f-418f-bfbd-c8f56b74d79c" containerName="nova-metadata-metadata" 
containerID="cri-o://57740cd938e88f85b3b03c94cb13b5b79ce1b7634f1e563483f602093ba3cf31" gracePeriod=30 Dec 04 15:27:32 crc kubenswrapper[4946]: I1204 15:27:32.784845 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-hnrx8" Dec 04 15:27:32 crc kubenswrapper[4946]: I1204 15:27:32.908264 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baed508c-c3b8-40d5-a421-5121f9e3f8f5-config-data\") pod \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\" (UID: \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\") " Dec 04 15:27:32 crc kubenswrapper[4946]: I1204 15:27:32.908316 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baed508c-c3b8-40d5-a421-5121f9e3f8f5-combined-ca-bundle\") pod \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\" (UID: \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\") " Dec 04 15:27:32 crc kubenswrapper[4946]: I1204 15:27:32.908407 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/baed508c-c3b8-40d5-a421-5121f9e3f8f5-scripts\") pod \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\" (UID: \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\") " Dec 04 15:27:32 crc kubenswrapper[4946]: I1204 15:27:32.908640 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jp8hc\" (UniqueName: \"kubernetes.io/projected/baed508c-c3b8-40d5-a421-5121f9e3f8f5-kube-api-access-jp8hc\") pod \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\" (UID: \"baed508c-c3b8-40d5-a421-5121f9e3f8f5\") " Dec 04 15:27:32 crc kubenswrapper[4946]: I1204 15:27:32.917308 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/baed508c-c3b8-40d5-a421-5121f9e3f8f5-kube-api-access-jp8hc" (OuterVolumeSpecName: "kube-api-access-jp8hc") pod "baed508c-c3b8-40d5-a421-5121f9e3f8f5" (UID: "baed508c-c3b8-40d5-a421-5121f9e3f8f5"). InnerVolumeSpecName "kube-api-access-jp8hc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:27:32 crc kubenswrapper[4946]: I1204 15:27:32.919576 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baed508c-c3b8-40d5-a421-5121f9e3f8f5-scripts" (OuterVolumeSpecName: "scripts") pod "baed508c-c3b8-40d5-a421-5121f9e3f8f5" (UID: "baed508c-c3b8-40d5-a421-5121f9e3f8f5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:32 crc kubenswrapper[4946]: I1204 15:27:32.951428 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baed508c-c3b8-40d5-a421-5121f9e3f8f5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "baed508c-c3b8-40d5-a421-5121f9e3f8f5" (UID: "baed508c-c3b8-40d5-a421-5121f9e3f8f5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:32 crc kubenswrapper[4946]: I1204 15:27:32.963353 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baed508c-c3b8-40d5-a421-5121f9e3f8f5-config-data" (OuterVolumeSpecName: "config-data") pod "baed508c-c3b8-40d5-a421-5121f9e3f8f5" (UID: "baed508c-c3b8-40d5-a421-5121f9e3f8f5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.011239 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baed508c-c3b8-40d5-a421-5121f9e3f8f5-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.011280 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baed508c-c3b8-40d5-a421-5121f9e3f8f5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.011296 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/baed508c-c3b8-40d5-a421-5121f9e3f8f5-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.011307 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jp8hc\" (UniqueName: \"kubernetes.io/projected/baed508c-c3b8-40d5-a421-5121f9e3f8f5-kube-api-access-jp8hc\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.127282 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.214962 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-config-data\") pod \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.215039 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xn9f9\" (UniqueName: \"kubernetes.io/projected/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-kube-api-access-xn9f9\") pod \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.215155 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-combined-ca-bundle\") pod \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.215374 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-logs\") pod \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.215425 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-nova-metadata-tls-certs\") pod \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\" (UID: \"cefeef36-2c7f-418f-bfbd-c8f56b74d79c\") " Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.216014 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-logs" (OuterVolumeSpecName: "logs") pod "cefeef36-2c7f-418f-bfbd-c8f56b74d79c" (UID: "cefeef36-2c7f-418f-bfbd-c8f56b74d79c"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.222361 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-kube-api-access-xn9f9" (OuterVolumeSpecName: "kube-api-access-xn9f9") pod "cefeef36-2c7f-418f-bfbd-c8f56b74d79c" (UID: "cefeef36-2c7f-418f-bfbd-c8f56b74d79c"). InnerVolumeSpecName "kube-api-access-xn9f9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.258243 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-config-data" (OuterVolumeSpecName: "config-data") pod "cefeef36-2c7f-418f-bfbd-c8f56b74d79c" (UID: "cefeef36-2c7f-418f-bfbd-c8f56b74d79c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.262394 4946 generic.go:334] "Generic (PLEG): container finished" podID="5bf2cf28-d180-411d-b617-6033ce853019" containerID="3c1960c87f567c41f866a8541566ff728069d1096d697b62f905e87c516d491b" exitCode=0 Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.262463 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5bf2cf28-d180-411d-b617-6033ce853019","Type":"ContainerDied","Data":"3c1960c87f567c41f866a8541566ff728069d1096d697b62f905e87c516d491b"} Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.269125 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-hnrx8" event={"ID":"baed508c-c3b8-40d5-a421-5121f9e3f8f5","Type":"ContainerDied","Data":"0371ef6b364242501e59057248be19c057256e892953427fd85ac465023b0d3b"} Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.269176 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0371ef6b364242501e59057248be19c057256e892953427fd85ac465023b0d3b" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.271611 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-hnrx8" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.272564 4946 generic.go:334] "Generic (PLEG): container finished" podID="cefeef36-2c7f-418f-bfbd-c8f56b74d79c" containerID="57740cd938e88f85b3b03c94cb13b5b79ce1b7634f1e563483f602093ba3cf31" exitCode=0 Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.272607 4946 generic.go:334] "Generic (PLEG): container finished" podID="cefeef36-2c7f-418f-bfbd-c8f56b74d79c" containerID="38cee4632d208925a36fb41de025e110e2f48338232d0b53281fceb9beffd01a" exitCode=143 Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.272631 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cefeef36-2c7f-418f-bfbd-c8f56b74d79c","Type":"ContainerDied","Data":"57740cd938e88f85b3b03c94cb13b5b79ce1b7634f1e563483f602093ba3cf31"} Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.272662 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cefeef36-2c7f-418f-bfbd-c8f56b74d79c","Type":"ContainerDied","Data":"38cee4632d208925a36fb41de025e110e2f48338232d0b53281fceb9beffd01a"} Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.272673 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cefeef36-2c7f-418f-bfbd-c8f56b74d79c","Type":"ContainerDied","Data":"7daeaaa03c778e01f72957b65cff9e76e6b7bb13c7427bd987579650242d37e8"} Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.272690 4946 scope.go:117] "RemoveContainer" containerID="57740cd938e88f85b3b03c94cb13b5b79ce1b7634f1e563483f602093ba3cf31" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.272826 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.282607 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cefeef36-2c7f-418f-bfbd-c8f56b74d79c" (UID: "cefeef36-2c7f-418f-bfbd-c8f56b74d79c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.295719 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "cefeef36-2c7f-418f-bfbd-c8f56b74d79c" (UID: "cefeef36-2c7f-418f-bfbd-c8f56b74d79c"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.315052 4946 scope.go:117] "RemoveContainer" containerID="38cee4632d208925a36fb41de025e110e2f48338232d0b53281fceb9beffd01a" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.318604 4946 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-logs\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.318645 4946 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.318661 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.318680 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xn9f9\" (UniqueName: \"kubernetes.io/projected/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-kube-api-access-xn9f9\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.318694 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cefeef36-2c7f-418f-bfbd-c8f56b74d79c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:33 crc kubenswrapper[4946]: E1204 15:27:33.331323 4946 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3c1960c87f567c41f866a8541566ff728069d1096d697b62f905e87c516d491b is running failed: container process not found" containerID="3c1960c87f567c41f866a8541566ff728069d1096d697b62f905e87c516d491b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 04 15:27:33 crc kubenswrapper[4946]: E1204 15:27:33.331804 4946 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3c1960c87f567c41f866a8541566ff728069d1096d697b62f905e87c516d491b is running failed: container process not found" containerID="3c1960c87f567c41f866a8541566ff728069d1096d697b62f905e87c516d491b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 04 15:27:33 crc kubenswrapper[4946]: E1204 15:27:33.332327 4946 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3c1960c87f567c41f866a8541566ff728069d1096d697b62f905e87c516d491b is running failed: container process not found" containerID="3c1960c87f567c41f866a8541566ff728069d1096d697b62f905e87c516d491b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 04 15:27:33 crc kubenswrapper[4946]: E1204 15:27:33.332404 4946 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3c1960c87f567c41f866a8541566ff728069d1096d697b62f905e87c516d491b is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="5bf2cf28-d180-411d-b617-6033ce853019" containerName="nova-scheduler-scheduler" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.332525 4946 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/nova-cell1-conductor-0"] Dec 04 15:27:33 crc kubenswrapper[4946]: E1204 15:27:33.333279 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53c3c8fa-f2df-43db-aa5b-cbee4b29d487" containerName="init" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.333304 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="53c3c8fa-f2df-43db-aa5b-cbee4b29d487" containerName="init" Dec 04 15:27:33 crc kubenswrapper[4946]: E1204 15:27:33.333325 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e47ae595-d6e7-4eec-828f-98755b3d08b5" containerName="nova-manage" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.333335 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e47ae595-d6e7-4eec-828f-98755b3d08b5" containerName="nova-manage" Dec 04 15:27:33 crc kubenswrapper[4946]: E1204 15:27:33.333350 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baed508c-c3b8-40d5-a421-5121f9e3f8f5" containerName="nova-cell1-conductor-db-sync" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.333362 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="baed508c-c3b8-40d5-a421-5121f9e3f8f5" containerName="nova-cell1-conductor-db-sync" Dec 04 15:27:33 crc kubenswrapper[4946]: E1204 15:27:33.333374 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cefeef36-2c7f-418f-bfbd-c8f56b74d79c" containerName="nova-metadata-metadata" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.333381 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="cefeef36-2c7f-418f-bfbd-c8f56b74d79c" containerName="nova-metadata-metadata" Dec 04 15:27:33 crc kubenswrapper[4946]: E1204 15:27:33.333395 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53c3c8fa-f2df-43db-aa5b-cbee4b29d487" containerName="dnsmasq-dns" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.333403 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="53c3c8fa-f2df-43db-aa5b-cbee4b29d487" containerName="dnsmasq-dns" Dec 04 15:27:33 crc kubenswrapper[4946]: E1204 15:27:33.333430 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cefeef36-2c7f-418f-bfbd-c8f56b74d79c" containerName="nova-metadata-log" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.333441 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="cefeef36-2c7f-418f-bfbd-c8f56b74d79c" containerName="nova-metadata-log" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.333708 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="53c3c8fa-f2df-43db-aa5b-cbee4b29d487" containerName="dnsmasq-dns" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.333733 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="baed508c-c3b8-40d5-a421-5121f9e3f8f5" containerName="nova-cell1-conductor-db-sync" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.333751 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="cefeef36-2c7f-418f-bfbd-c8f56b74d79c" containerName="nova-metadata-metadata" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.333765 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="cefeef36-2c7f-418f-bfbd-c8f56b74d79c" containerName="nova-metadata-log" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.333786 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="e47ae595-d6e7-4eec-828f-98755b3d08b5" containerName="nova-manage" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.334723 4946 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.338523 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.372040 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.373867 4946 scope.go:117] "RemoveContainer" containerID="57740cd938e88f85b3b03c94cb13b5b79ce1b7634f1e563483f602093ba3cf31" Dec 04 15:27:33 crc kubenswrapper[4946]: E1204 15:27:33.374358 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57740cd938e88f85b3b03c94cb13b5b79ce1b7634f1e563483f602093ba3cf31\": container with ID starting with 57740cd938e88f85b3b03c94cb13b5b79ce1b7634f1e563483f602093ba3cf31 not found: ID does not exist" containerID="57740cd938e88f85b3b03c94cb13b5b79ce1b7634f1e563483f602093ba3cf31" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.374387 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57740cd938e88f85b3b03c94cb13b5b79ce1b7634f1e563483f602093ba3cf31"} err="failed to get container status \"57740cd938e88f85b3b03c94cb13b5b79ce1b7634f1e563483f602093ba3cf31\": rpc error: code = NotFound desc = could not find container \"57740cd938e88f85b3b03c94cb13b5b79ce1b7634f1e563483f602093ba3cf31\": container with ID starting with 57740cd938e88f85b3b03c94cb13b5b79ce1b7634f1e563483f602093ba3cf31 not found: ID does not exist" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.374410 4946 scope.go:117] "RemoveContainer" containerID="38cee4632d208925a36fb41de025e110e2f48338232d0b53281fceb9beffd01a" Dec 04 15:27:33 crc kubenswrapper[4946]: E1204 15:27:33.374584 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38cee4632d208925a36fb41de025e110e2f48338232d0b53281fceb9beffd01a\": container with ID starting with 38cee4632d208925a36fb41de025e110e2f48338232d0b53281fceb9beffd01a not found: ID does not exist" containerID="38cee4632d208925a36fb41de025e110e2f48338232d0b53281fceb9beffd01a" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.374605 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38cee4632d208925a36fb41de025e110e2f48338232d0b53281fceb9beffd01a"} err="failed to get container status \"38cee4632d208925a36fb41de025e110e2f48338232d0b53281fceb9beffd01a\": rpc error: code = NotFound desc = could not find container \"38cee4632d208925a36fb41de025e110e2f48338232d0b53281fceb9beffd01a\": container with ID starting with 38cee4632d208925a36fb41de025e110e2f48338232d0b53281fceb9beffd01a not found: ID does not exist" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.374621 4946 scope.go:117] "RemoveContainer" containerID="57740cd938e88f85b3b03c94cb13b5b79ce1b7634f1e563483f602093ba3cf31" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.374778 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57740cd938e88f85b3b03c94cb13b5b79ce1b7634f1e563483f602093ba3cf31"} err="failed to get container status \"57740cd938e88f85b3b03c94cb13b5b79ce1b7634f1e563483f602093ba3cf31\": rpc error: code = NotFound desc = could not find container \"57740cd938e88f85b3b03c94cb13b5b79ce1b7634f1e563483f602093ba3cf31\": container with ID 
starting with 57740cd938e88f85b3b03c94cb13b5b79ce1b7634f1e563483f602093ba3cf31 not found: ID does not exist" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.374800 4946 scope.go:117] "RemoveContainer" containerID="38cee4632d208925a36fb41de025e110e2f48338232d0b53281fceb9beffd01a" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.375057 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38cee4632d208925a36fb41de025e110e2f48338232d0b53281fceb9beffd01a"} err="failed to get container status \"38cee4632d208925a36fb41de025e110e2f48338232d0b53281fceb9beffd01a\": rpc error: code = NotFound desc = could not find container \"38cee4632d208925a36fb41de025e110e2f48338232d0b53281fceb9beffd01a\": container with ID starting with 38cee4632d208925a36fb41de025e110e2f48338232d0b53281fceb9beffd01a not found: ID does not exist" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.421327 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4664ae52-b2f1-43d1-a79f-75ccb8fc3a07-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"4664ae52-b2f1-43d1-a79f-75ccb8fc3a07\") " pod="openstack/nova-cell1-conductor-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.421795 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4664ae52-b2f1-43d1-a79f-75ccb8fc3a07-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"4664ae52-b2f1-43d1-a79f-75ccb8fc3a07\") " pod="openstack/nova-cell1-conductor-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.421917 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pvgz\" (UniqueName: \"kubernetes.io/projected/4664ae52-b2f1-43d1-a79f-75ccb8fc3a07-kube-api-access-9pvgz\") pod \"nova-cell1-conductor-0\" (UID: \"4664ae52-b2f1-43d1-a79f-75ccb8fc3a07\") " pod="openstack/nova-cell1-conductor-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.524819 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4664ae52-b2f1-43d1-a79f-75ccb8fc3a07-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"4664ae52-b2f1-43d1-a79f-75ccb8fc3a07\") " pod="openstack/nova-cell1-conductor-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.524957 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pvgz\" (UniqueName: \"kubernetes.io/projected/4664ae52-b2f1-43d1-a79f-75ccb8fc3a07-kube-api-access-9pvgz\") pod \"nova-cell1-conductor-0\" (UID: \"4664ae52-b2f1-43d1-a79f-75ccb8fc3a07\") " pod="openstack/nova-cell1-conductor-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.525047 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4664ae52-b2f1-43d1-a79f-75ccb8fc3a07-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"4664ae52-b2f1-43d1-a79f-75ccb8fc3a07\") " pod="openstack/nova-cell1-conductor-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.530454 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4664ae52-b2f1-43d1-a79f-75ccb8fc3a07-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"4664ae52-b2f1-43d1-a79f-75ccb8fc3a07\") " 
pod="openstack/nova-cell1-conductor-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.532490 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4664ae52-b2f1-43d1-a79f-75ccb8fc3a07-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"4664ae52-b2f1-43d1-a79f-75ccb8fc3a07\") " pod="openstack/nova-cell1-conductor-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.549430 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pvgz\" (UniqueName: \"kubernetes.io/projected/4664ae52-b2f1-43d1-a79f-75ccb8fc3a07-kube-api-access-9pvgz\") pod \"nova-cell1-conductor-0\" (UID: \"4664ae52-b2f1-43d1-a79f-75ccb8fc3a07\") " pod="openstack/nova-cell1-conductor-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.600313 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.611848 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.612326 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.636711 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:27:33 crc kubenswrapper[4946]: E1204 15:27:33.637458 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bf2cf28-d180-411d-b617-6033ce853019" containerName="nova-scheduler-scheduler" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.637479 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bf2cf28-d180-411d-b617-6033ce853019" containerName="nova-scheduler-scheduler" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.637711 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bf2cf28-d180-411d-b617-6033ce853019" containerName="nova-scheduler-scheduler" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.638911 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.641043 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.641404 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.649283 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.664572 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.728460 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bf2cf28-d180-411d-b617-6033ce853019-combined-ca-bundle\") pod \"5bf2cf28-d180-411d-b617-6033ce853019\" (UID: \"5bf2cf28-d180-411d-b617-6033ce853019\") " Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.728983 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pvml\" (UniqueName: \"kubernetes.io/projected/5bf2cf28-d180-411d-b617-6033ce853019-kube-api-access-5pvml\") pod \"5bf2cf28-d180-411d-b617-6033ce853019\" (UID: \"5bf2cf28-d180-411d-b617-6033ce853019\") " Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.729086 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bf2cf28-d180-411d-b617-6033ce853019-config-data\") pod \"5bf2cf28-d180-411d-b617-6033ce853019\" (UID: \"5bf2cf28-d180-411d-b617-6033ce853019\") " Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.729553 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/be834203-f4c5-4c00-a37a-7f8193bc6047-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " pod="openstack/nova-metadata-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.729612 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be834203-f4c5-4c00-a37a-7f8193bc6047-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " pod="openstack/nova-metadata-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.729655 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be834203-f4c5-4c00-a37a-7f8193bc6047-logs\") pod \"nova-metadata-0\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " pod="openstack/nova-metadata-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.729697 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sn2hp\" (UniqueName: \"kubernetes.io/projected/be834203-f4c5-4c00-a37a-7f8193bc6047-kube-api-access-sn2hp\") pod \"nova-metadata-0\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " pod="openstack/nova-metadata-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.729749 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be834203-f4c5-4c00-a37a-7f8193bc6047-config-data\") pod \"nova-metadata-0\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " pod="openstack/nova-metadata-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.735040 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bf2cf28-d180-411d-b617-6033ce853019-kube-api-access-5pvml" (OuterVolumeSpecName: "kube-api-access-5pvml") pod "5bf2cf28-d180-411d-b617-6033ce853019" (UID: "5bf2cf28-d180-411d-b617-6033ce853019"). InnerVolumeSpecName "kube-api-access-5pvml". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.769651 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bf2cf28-d180-411d-b617-6033ce853019-config-data" (OuterVolumeSpecName: "config-data") pod "5bf2cf28-d180-411d-b617-6033ce853019" (UID: "5bf2cf28-d180-411d-b617-6033ce853019"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.832717 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bf2cf28-d180-411d-b617-6033ce853019-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5bf2cf28-d180-411d-b617-6033ce853019" (UID: "5bf2cf28-d180-411d-b617-6033ce853019"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.837655 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/be834203-f4c5-4c00-a37a-7f8193bc6047-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " pod="openstack/nova-metadata-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.837787 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be834203-f4c5-4c00-a37a-7f8193bc6047-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " pod="openstack/nova-metadata-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.837861 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be834203-f4c5-4c00-a37a-7f8193bc6047-logs\") pod \"nova-metadata-0\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " pod="openstack/nova-metadata-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.837935 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sn2hp\" (UniqueName: \"kubernetes.io/projected/be834203-f4c5-4c00-a37a-7f8193bc6047-kube-api-access-sn2hp\") pod \"nova-metadata-0\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " pod="openstack/nova-metadata-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.838766 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be834203-f4c5-4c00-a37a-7f8193bc6047-config-data\") pod \"nova-metadata-0\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " pod="openstack/nova-metadata-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.839043 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bf2cf28-d180-411d-b617-6033ce853019-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.839066 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pvml\" (UniqueName: \"kubernetes.io/projected/5bf2cf28-d180-411d-b617-6033ce853019-kube-api-access-5pvml\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.839082 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bf2cf28-d180-411d-b617-6033ce853019-config-data\") on node \"crc\" 
DevicePath \"\"" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.841108 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be834203-f4c5-4c00-a37a-7f8193bc6047-logs\") pod \"nova-metadata-0\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " pod="openstack/nova-metadata-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.847643 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be834203-f4c5-4c00-a37a-7f8193bc6047-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " pod="openstack/nova-metadata-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.847786 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be834203-f4c5-4c00-a37a-7f8193bc6047-config-data\") pod \"nova-metadata-0\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " pod="openstack/nova-metadata-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.850210 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/be834203-f4c5-4c00-a37a-7f8193bc6047-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " pod="openstack/nova-metadata-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.868851 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sn2hp\" (UniqueName: \"kubernetes.io/projected/be834203-f4c5-4c00-a37a-7f8193bc6047-kube-api-access-sn2hp\") pod \"nova-metadata-0\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " pod="openstack/nova-metadata-0" Dec 04 15:27:33 crc kubenswrapper[4946]: I1204 15:27:33.952881 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.215267 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.289285 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.292104 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5bf2cf28-d180-411d-b617-6033ce853019","Type":"ContainerDied","Data":"5da3dadb1777d2465668f2d317e8a79ed045b3b58a5895ea885a6ade07c00ce0"} Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.292233 4946 scope.go:117] "RemoveContainer" containerID="3c1960c87f567c41f866a8541566ff728069d1096d697b62f905e87c516d491b" Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.301373 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"4664ae52-b2f1-43d1-a79f-75ccb8fc3a07","Type":"ContainerStarted","Data":"c35b7d2af2650f830f40364148e31da4485f50d484411ced5882dbd24cd52b72"} Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.368668 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.388833 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.402916 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.404817 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.409435 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.412451 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 15:27:34 crc kubenswrapper[4946]: W1204 15:27:34.471533 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe834203_f4c5_4c00_a37a_7f8193bc6047.slice/crio-f26b9ce07fd0e9db5778f1a3aef7707d2dc7fb5241cb3a868e625d3e53abbecb WatchSource:0}: Error finding container f26b9ce07fd0e9db5778f1a3aef7707d2dc7fb5241cb3a868e625d3e53abbecb: Status 404 returned error can't find the container with id f26b9ce07fd0e9db5778f1a3aef7707d2dc7fb5241cb3a868e625d3e53abbecb Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.477557 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.564190 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d0bae15-c439-4171-93c7-c652891d31fe-config-data\") pod \"nova-scheduler-0\" (UID: \"8d0bae15-c439-4171-93c7-c652891d31fe\") " pod="openstack/nova-scheduler-0" Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.564320 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pg64t\" (UniqueName: \"kubernetes.io/projected/8d0bae15-c439-4171-93c7-c652891d31fe-kube-api-access-pg64t\") pod \"nova-scheduler-0\" (UID: \"8d0bae15-c439-4171-93c7-c652891d31fe\") " pod="openstack/nova-scheduler-0" Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.564358 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d0bae15-c439-4171-93c7-c652891d31fe-combined-ca-bundle\") pod 
\"nova-scheduler-0\" (UID: \"8d0bae15-c439-4171-93c7-c652891d31fe\") " pod="openstack/nova-scheduler-0" Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.668986 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d0bae15-c439-4171-93c7-c652891d31fe-config-data\") pod \"nova-scheduler-0\" (UID: \"8d0bae15-c439-4171-93c7-c652891d31fe\") " pod="openstack/nova-scheduler-0" Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.670927 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pg64t\" (UniqueName: \"kubernetes.io/projected/8d0bae15-c439-4171-93c7-c652891d31fe-kube-api-access-pg64t\") pod \"nova-scheduler-0\" (UID: \"8d0bae15-c439-4171-93c7-c652891d31fe\") " pod="openstack/nova-scheduler-0" Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.670967 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d0bae15-c439-4171-93c7-c652891d31fe-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8d0bae15-c439-4171-93c7-c652891d31fe\") " pod="openstack/nova-scheduler-0" Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.689435 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d0bae15-c439-4171-93c7-c652891d31fe-config-data\") pod \"nova-scheduler-0\" (UID: \"8d0bae15-c439-4171-93c7-c652891d31fe\") " pod="openstack/nova-scheduler-0" Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.693871 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d0bae15-c439-4171-93c7-c652891d31fe-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8d0bae15-c439-4171-93c7-c652891d31fe\") " pod="openstack/nova-scheduler-0" Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.732819 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pg64t\" (UniqueName: \"kubernetes.io/projected/8d0bae15-c439-4171-93c7-c652891d31fe-kube-api-access-pg64t\") pod \"nova-scheduler-0\" (UID: \"8d0bae15-c439-4171-93c7-c652891d31fe\") " pod="openstack/nova-scheduler-0" Dec 04 15:27:34 crc kubenswrapper[4946]: I1204 15:27:34.746665 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.329561 4946 generic.go:334] "Generic (PLEG): container finished" podID="6a5e7e17-c32e-4713-ba05-de91486be60d" containerID="bb4424e4af8ae228360eca8ec81e8deb2bd2964c02861942ca904a63e17ed685" exitCode=0 Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.329644 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6a5e7e17-c32e-4713-ba05-de91486be60d","Type":"ContainerDied","Data":"bb4424e4af8ae228360eca8ec81e8deb2bd2964c02861942ca904a63e17ed685"} Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.333041 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"be834203-f4c5-4c00-a37a-7f8193bc6047","Type":"ContainerStarted","Data":"67cfa83c4de8ce357c5f059859881b55af72cf69cd967de65cb1dfe2c0396644"} Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.333081 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"be834203-f4c5-4c00-a37a-7f8193bc6047","Type":"ContainerStarted","Data":"f26b9ce07fd0e9db5778f1a3aef7707d2dc7fb5241cb3a868e625d3e53abbecb"} Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.336366 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"4664ae52-b2f1-43d1-a79f-75ccb8fc3a07","Type":"ContainerStarted","Data":"1b03e0df665675e4731f25359e28f32e34994598cb9f9d9cdbdf62a3c28a1d9d"} Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.336567 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.428377 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.4283523799999998 podStartE2EDuration="2.42835238s" podCreationTimestamp="2025-12-04 15:27:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:27:35.35785396 +0000 UTC m=+1506.243897601" watchObservedRunningTime="2025-12-04 15:27:35.42835238 +0000 UTC m=+1506.314396021" Dec 04 15:27:35 crc kubenswrapper[4946]: W1204 15:27:35.428390 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d0bae15_c439_4171_93c7_c652891d31fe.slice/crio-5a4167d61728d84f9e71189844d4332639a6c5f9183a557112224467ef8af3aa WatchSource:0}: Error finding container 5a4167d61728d84f9e71189844d4332639a6c5f9183a557112224467ef8af3aa: Status 404 returned error can't find the container with id 5a4167d61728d84f9e71189844d4332639a6c5f9183a557112224467ef8af3aa Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.430201 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.468103 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bf2cf28-d180-411d-b617-6033ce853019" path="/var/lib/kubelet/pods/5bf2cf28-d180-411d-b617-6033ce853019/volumes" Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.469385 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cefeef36-2c7f-418f-bfbd-c8f56b74d79c" path="/var/lib/kubelet/pods/cefeef36-2c7f-418f-bfbd-c8f56b74d79c/volumes" Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.484263 4946 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.599656 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a5e7e17-c32e-4713-ba05-de91486be60d-combined-ca-bundle\") pod \"6a5e7e17-c32e-4713-ba05-de91486be60d\" (UID: \"6a5e7e17-c32e-4713-ba05-de91486be60d\") " Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.600182 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a5e7e17-c32e-4713-ba05-de91486be60d-config-data\") pod \"6a5e7e17-c32e-4713-ba05-de91486be60d\" (UID: \"6a5e7e17-c32e-4713-ba05-de91486be60d\") " Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.600213 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7x2z2\" (UniqueName: \"kubernetes.io/projected/6a5e7e17-c32e-4713-ba05-de91486be60d-kube-api-access-7x2z2\") pod \"6a5e7e17-c32e-4713-ba05-de91486be60d\" (UID: \"6a5e7e17-c32e-4713-ba05-de91486be60d\") " Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.600284 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a5e7e17-c32e-4713-ba05-de91486be60d-logs\") pod \"6a5e7e17-c32e-4713-ba05-de91486be60d\" (UID: \"6a5e7e17-c32e-4713-ba05-de91486be60d\") " Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.601367 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a5e7e17-c32e-4713-ba05-de91486be60d-logs" (OuterVolumeSpecName: "logs") pod "6a5e7e17-c32e-4713-ba05-de91486be60d" (UID: "6a5e7e17-c32e-4713-ba05-de91486be60d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.605106 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a5e7e17-c32e-4713-ba05-de91486be60d-kube-api-access-7x2z2" (OuterVolumeSpecName: "kube-api-access-7x2z2") pod "6a5e7e17-c32e-4713-ba05-de91486be60d" (UID: "6a5e7e17-c32e-4713-ba05-de91486be60d"). InnerVolumeSpecName "kube-api-access-7x2z2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.632792 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a5e7e17-c32e-4713-ba05-de91486be60d-config-data" (OuterVolumeSpecName: "config-data") pod "6a5e7e17-c32e-4713-ba05-de91486be60d" (UID: "6a5e7e17-c32e-4713-ba05-de91486be60d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.639380 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a5e7e17-c32e-4713-ba05-de91486be60d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6a5e7e17-c32e-4713-ba05-de91486be60d" (UID: "6a5e7e17-c32e-4713-ba05-de91486be60d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.702554 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a5e7e17-c32e-4713-ba05-de91486be60d-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.702596 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7x2z2\" (UniqueName: \"kubernetes.io/projected/6a5e7e17-c32e-4713-ba05-de91486be60d-kube-api-access-7x2z2\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.702609 4946 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a5e7e17-c32e-4713-ba05-de91486be60d-logs\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:35 crc kubenswrapper[4946]: I1204 15:27:35.702618 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a5e7e17-c32e-4713-ba05-de91486be60d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.355769 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8d0bae15-c439-4171-93c7-c652891d31fe","Type":"ContainerStarted","Data":"5a4167d61728d84f9e71189844d4332639a6c5f9183a557112224467ef8af3aa"} Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.360404 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6a5e7e17-c32e-4713-ba05-de91486be60d","Type":"ContainerDied","Data":"b1e53d64686ea16b987a72a5fe6f7b9c22a78f5130c5808b5e3f266147027854"} Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.360433 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.360527 4946 scope.go:117] "RemoveContainer" containerID="bb4424e4af8ae228360eca8ec81e8deb2bd2964c02861942ca904a63e17ed685" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.412928 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.413094 4946 scope.go:117] "RemoveContainer" containerID="48d8d77524f46ff21bb850733a59702531d5dae490d1bb444868143d1aee0fc4" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.431704 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.442105 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 04 15:27:36 crc kubenswrapper[4946]: E1204 15:27:36.442678 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a5e7e17-c32e-4713-ba05-de91486be60d" containerName="nova-api-api" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.442705 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a5e7e17-c32e-4713-ba05-de91486be60d" containerName="nova-api-api" Dec 04 15:27:36 crc kubenswrapper[4946]: E1204 15:27:36.442738 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a5e7e17-c32e-4713-ba05-de91486be60d" containerName="nova-api-log" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.442748 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a5e7e17-c32e-4713-ba05-de91486be60d" containerName="nova-api-log" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.442987 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a5e7e17-c32e-4713-ba05-de91486be60d" containerName="nova-api-log" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.443025 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a5e7e17-c32e-4713-ba05-de91486be60d" containerName="nova-api-api" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.444338 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.453636 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.481510 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.537435 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63a28e83-a076-47e9-9072-0c1e196a85df-config-data\") pod \"nova-api-0\" (UID: \"63a28e83-a076-47e9-9072-0c1e196a85df\") " pod="openstack/nova-api-0" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.537544 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63a28e83-a076-47e9-9072-0c1e196a85df-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"63a28e83-a076-47e9-9072-0c1e196a85df\") " pod="openstack/nova-api-0" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.539739 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63a28e83-a076-47e9-9072-0c1e196a85df-logs\") pod \"nova-api-0\" (UID: \"63a28e83-a076-47e9-9072-0c1e196a85df\") " pod="openstack/nova-api-0" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.540252 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqw4r\" (UniqueName: \"kubernetes.io/projected/63a28e83-a076-47e9-9072-0c1e196a85df-kube-api-access-dqw4r\") pod \"nova-api-0\" (UID: \"63a28e83-a076-47e9-9072-0c1e196a85df\") " pod="openstack/nova-api-0" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.642401 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63a28e83-a076-47e9-9072-0c1e196a85df-logs\") pod \"nova-api-0\" (UID: \"63a28e83-a076-47e9-9072-0c1e196a85df\") " pod="openstack/nova-api-0" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.642479 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqw4r\" (UniqueName: \"kubernetes.io/projected/63a28e83-a076-47e9-9072-0c1e196a85df-kube-api-access-dqw4r\") pod \"nova-api-0\" (UID: \"63a28e83-a076-47e9-9072-0c1e196a85df\") " pod="openstack/nova-api-0" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.642550 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63a28e83-a076-47e9-9072-0c1e196a85df-config-data\") pod \"nova-api-0\" (UID: \"63a28e83-a076-47e9-9072-0c1e196a85df\") " pod="openstack/nova-api-0" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.642587 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63a28e83-a076-47e9-9072-0c1e196a85df-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"63a28e83-a076-47e9-9072-0c1e196a85df\") " pod="openstack/nova-api-0" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.643016 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63a28e83-a076-47e9-9072-0c1e196a85df-logs\") pod \"nova-api-0\" (UID: \"63a28e83-a076-47e9-9072-0c1e196a85df\") " 
pod="openstack/nova-api-0" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.656291 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63a28e83-a076-47e9-9072-0c1e196a85df-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"63a28e83-a076-47e9-9072-0c1e196a85df\") " pod="openstack/nova-api-0" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.656503 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63a28e83-a076-47e9-9072-0c1e196a85df-config-data\") pod \"nova-api-0\" (UID: \"63a28e83-a076-47e9-9072-0c1e196a85df\") " pod="openstack/nova-api-0" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.660667 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqw4r\" (UniqueName: \"kubernetes.io/projected/63a28e83-a076-47e9-9072-0c1e196a85df-kube-api-access-dqw4r\") pod \"nova-api-0\" (UID: \"63a28e83-a076-47e9-9072-0c1e196a85df\") " pod="openstack/nova-api-0" Dec 04 15:27:36 crc kubenswrapper[4946]: I1204 15:27:36.783003 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 04 15:27:37 crc kubenswrapper[4946]: I1204 15:27:37.372671 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8d0bae15-c439-4171-93c7-c652891d31fe","Type":"ContainerStarted","Data":"35abd03b520252fd61eeb9554fc3d7d5c8b96156abfaa82c217eab32ea06992c"} Dec 04 15:27:37 crc kubenswrapper[4946]: I1204 15:27:37.380661 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"be834203-f4c5-4c00-a37a-7f8193bc6047","Type":"ContainerStarted","Data":"d81eeac5fe2ac5de25ad8c82ced74845aa674d08f66014da3d3c5c1ae802b530"} Dec 04 15:27:37 crc kubenswrapper[4946]: I1204 15:27:37.405793 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 04 15:27:37 crc kubenswrapper[4946]: I1204 15:27:37.416190 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.416168547 podStartE2EDuration="3.416168547s" podCreationTimestamp="2025-12-04 15:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:27:37.392343725 +0000 UTC m=+1508.278387366" watchObservedRunningTime="2025-12-04 15:27:37.416168547 +0000 UTC m=+1508.302212188" Dec 04 15:27:37 crc kubenswrapper[4946]: W1204 15:27:37.419375 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63a28e83_a076_47e9_9072_0c1e196a85df.slice/crio-4420b628431bd8e8e0954812e65039c9a23345c10cb1a0c5efc9adf14320ab17 WatchSource:0}: Error finding container 4420b628431bd8e8e0954812e65039c9a23345c10cb1a0c5efc9adf14320ab17: Status 404 returned error can't find the container with id 4420b628431bd8e8e0954812e65039c9a23345c10cb1a0c5efc9adf14320ab17 Dec 04 15:27:37 crc kubenswrapper[4946]: I1204 15:27:37.436635 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=4.436608288 podStartE2EDuration="4.436608288s" podCreationTimestamp="2025-12-04 15:27:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:27:37.412754575 +0000 UTC 
m=+1508.298798216" watchObservedRunningTime="2025-12-04 15:27:37.436608288 +0000 UTC m=+1508.322651949" Dec 04 15:27:37 crc kubenswrapper[4946]: I1204 15:27:37.484723 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a5e7e17-c32e-4713-ba05-de91486be60d" path="/var/lib/kubelet/pods/6a5e7e17-c32e-4713-ba05-de91486be60d/volumes" Dec 04 15:27:38 crc kubenswrapper[4946]: I1204 15:27:38.397101 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"63a28e83-a076-47e9-9072-0c1e196a85df","Type":"ContainerStarted","Data":"22e819d025a85f0a37f05fe5f4a993a1261ff60461b8094a73ccd3ad20fcc207"} Dec 04 15:27:38 crc kubenswrapper[4946]: I1204 15:27:38.397599 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"63a28e83-a076-47e9-9072-0c1e196a85df","Type":"ContainerStarted","Data":"fde34a681586a37d0e9a1830d3932e86f1991b224b6e006c63cdf7809cab9a43"} Dec 04 15:27:38 crc kubenswrapper[4946]: I1204 15:27:38.397613 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"63a28e83-a076-47e9-9072-0c1e196a85df","Type":"ContainerStarted","Data":"4420b628431bd8e8e0954812e65039c9a23345c10cb1a0c5efc9adf14320ab17"} Dec 04 15:27:38 crc kubenswrapper[4946]: I1204 15:27:38.425606 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.425578922 podStartE2EDuration="2.425578922s" podCreationTimestamp="2025-12-04 15:27:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:27:38.415991604 +0000 UTC m=+1509.302035255" watchObservedRunningTime="2025-12-04 15:27:38.425578922 +0000 UTC m=+1509.311622573" Dec 04 15:27:38 crc kubenswrapper[4946]: I1204 15:27:38.953768 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 04 15:27:38 crc kubenswrapper[4946]: I1204 15:27:38.954355 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 04 15:27:39 crc kubenswrapper[4946]: I1204 15:27:39.748027 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 04 15:27:42 crc kubenswrapper[4946]: I1204 15:27:42.058659 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 04 15:27:43 crc kubenswrapper[4946]: I1204 15:27:43.705073 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 04 15:27:43 crc kubenswrapper[4946]: I1204 15:27:43.953634 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 04 15:27:43 crc kubenswrapper[4946]: I1204 15:27:43.955361 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 04 15:27:44 crc kubenswrapper[4946]: I1204 15:27:44.748459 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 04 15:27:44 crc kubenswrapper[4946]: I1204 15:27:44.824888 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 04 15:27:44 crc kubenswrapper[4946]: I1204 15:27:44.966751 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="be834203-f4c5-4c00-a37a-7f8193bc6047" containerName="nova-metadata-metadata" 
probeResult="failure" output="Get \"https://10.217.0.218:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 04 15:27:44 crc kubenswrapper[4946]: I1204 15:27:44.967002 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="be834203-f4c5-4c00-a37a-7f8193bc6047" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.218:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 04 15:27:45 crc kubenswrapper[4946]: I1204 15:27:45.553668 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 04 15:27:46 crc kubenswrapper[4946]: I1204 15:27:46.305457 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 04 15:27:46 crc kubenswrapper[4946]: I1204 15:27:46.306721 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="f81b9295-1cdf-44a6-afef-1380c1e3cf54" containerName="kube-state-metrics" containerID="cri-o://4bf23230ab38e503d2a2a21f70c8a2d5390fe12439cfed141fe9b11cd8012bdd" gracePeriod=30 Dec 04 15:27:46 crc kubenswrapper[4946]: I1204 15:27:46.522488 4946 generic.go:334] "Generic (PLEG): container finished" podID="f81b9295-1cdf-44a6-afef-1380c1e3cf54" containerID="4bf23230ab38e503d2a2a21f70c8a2d5390fe12439cfed141fe9b11cd8012bdd" exitCode=2 Dec 04 15:27:46 crc kubenswrapper[4946]: I1204 15:27:46.522576 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f81b9295-1cdf-44a6-afef-1380c1e3cf54","Type":"ContainerDied","Data":"4bf23230ab38e503d2a2a21f70c8a2d5390fe12439cfed141fe9b11cd8012bdd"} Dec 04 15:27:46 crc kubenswrapper[4946]: I1204 15:27:46.784938 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 04 15:27:46 crc kubenswrapper[4946]: I1204 15:27:46.784997 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 04 15:27:46 crc kubenswrapper[4946]: I1204 15:27:46.949152 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.027283 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssfmc\" (UniqueName: \"kubernetes.io/projected/f81b9295-1cdf-44a6-afef-1380c1e3cf54-kube-api-access-ssfmc\") pod \"f81b9295-1cdf-44a6-afef-1380c1e3cf54\" (UID: \"f81b9295-1cdf-44a6-afef-1380c1e3cf54\") " Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.036892 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f81b9295-1cdf-44a6-afef-1380c1e3cf54-kube-api-access-ssfmc" (OuterVolumeSpecName: "kube-api-access-ssfmc") pod "f81b9295-1cdf-44a6-afef-1380c1e3cf54" (UID: "f81b9295-1cdf-44a6-afef-1380c1e3cf54"). InnerVolumeSpecName "kube-api-access-ssfmc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.130515 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssfmc\" (UniqueName: \"kubernetes.io/projected/f81b9295-1cdf-44a6-afef-1380c1e3cf54-kube-api-access-ssfmc\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.546445 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f81b9295-1cdf-44a6-afef-1380c1e3cf54","Type":"ContainerDied","Data":"f6674fb05bee621d31a43aba76b5c5ab5e463d48671c9751f4891845957f138c"} Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.546524 4946 scope.go:117] "RemoveContainer" containerID="4bf23230ab38e503d2a2a21f70c8a2d5390fe12439cfed141fe9b11cd8012bdd" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.546734 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.579714 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.601307 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.615680 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 04 15:27:47 crc kubenswrapper[4946]: E1204 15:27:47.616290 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f81b9295-1cdf-44a6-afef-1380c1e3cf54" containerName="kube-state-metrics" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.616321 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f81b9295-1cdf-44a6-afef-1380c1e3cf54" containerName="kube-state-metrics" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.616597 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f81b9295-1cdf-44a6-afef-1380c1e3cf54" containerName="kube-state-metrics" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.617515 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.627448 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.628459 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.641388 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.751614 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bb3c93e-3400-4b38-bc6d-733a1d345435-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"7bb3c93e-3400-4b38-bc6d-733a1d345435\") " pod="openstack/kube-state-metrics-0" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.751777 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lg8d\" (UniqueName: \"kubernetes.io/projected/7bb3c93e-3400-4b38-bc6d-733a1d345435-kube-api-access-8lg8d\") pod \"kube-state-metrics-0\" (UID: \"7bb3c93e-3400-4b38-bc6d-733a1d345435\") " pod="openstack/kube-state-metrics-0" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.751836 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bb3c93e-3400-4b38-bc6d-733a1d345435-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"7bb3c93e-3400-4b38-bc6d-733a1d345435\") " pod="openstack/kube-state-metrics-0" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.751922 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/7bb3c93e-3400-4b38-bc6d-733a1d345435-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"7bb3c93e-3400-4b38-bc6d-733a1d345435\") " pod="openstack/kube-state-metrics-0" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.853317 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lg8d\" (UniqueName: \"kubernetes.io/projected/7bb3c93e-3400-4b38-bc6d-733a1d345435-kube-api-access-8lg8d\") pod \"kube-state-metrics-0\" (UID: \"7bb3c93e-3400-4b38-bc6d-733a1d345435\") " pod="openstack/kube-state-metrics-0" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.853633 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bb3c93e-3400-4b38-bc6d-733a1d345435-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"7bb3c93e-3400-4b38-bc6d-733a1d345435\") " pod="openstack/kube-state-metrics-0" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.853680 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/7bb3c93e-3400-4b38-bc6d-733a1d345435-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"7bb3c93e-3400-4b38-bc6d-733a1d345435\") " pod="openstack/kube-state-metrics-0" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.853710 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/7bb3c93e-3400-4b38-bc6d-733a1d345435-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"7bb3c93e-3400-4b38-bc6d-733a1d345435\") " pod="openstack/kube-state-metrics-0" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.871931 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="63a28e83-a076-47e9-9072-0c1e196a85df" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.220:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.872516 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="63a28e83-a076-47e9-9072-0c1e196a85df" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.220:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.874626 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bb3c93e-3400-4b38-bc6d-733a1d345435-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"7bb3c93e-3400-4b38-bc6d-733a1d345435\") " pod="openstack/kube-state-metrics-0" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.876096 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bb3c93e-3400-4b38-bc6d-733a1d345435-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"7bb3c93e-3400-4b38-bc6d-733a1d345435\") " pod="openstack/kube-state-metrics-0" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.900263 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/7bb3c93e-3400-4b38-bc6d-733a1d345435-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"7bb3c93e-3400-4b38-bc6d-733a1d345435\") " pod="openstack/kube-state-metrics-0" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.900350 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lg8d\" (UniqueName: \"kubernetes.io/projected/7bb3c93e-3400-4b38-bc6d-733a1d345435-kube-api-access-8lg8d\") pod \"kube-state-metrics-0\" (UID: \"7bb3c93e-3400-4b38-bc6d-733a1d345435\") " pod="openstack/kube-state-metrics-0" Dec 04 15:27:47 crc kubenswrapper[4946]: I1204 15:27:47.970013 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 04 15:27:48 crc kubenswrapper[4946]: I1204 15:27:48.493882 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 04 15:27:48 crc kubenswrapper[4946]: W1204 15:27:48.494572 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7bb3c93e_3400_4b38_bc6d_733a1d345435.slice/crio-442a28f2a3753cadfaf6b2e84f15aee24446bcba92f6d66288e19f73a7b2f613 WatchSource:0}: Error finding container 442a28f2a3753cadfaf6b2e84f15aee24446bcba92f6d66288e19f73a7b2f613: Status 404 returned error can't find the container with id 442a28f2a3753cadfaf6b2e84f15aee24446bcba92f6d66288e19f73a7b2f613 Dec 04 15:27:48 crc kubenswrapper[4946]: I1204 15:27:48.558229 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7bb3c93e-3400-4b38-bc6d-733a1d345435","Type":"ContainerStarted","Data":"442a28f2a3753cadfaf6b2e84f15aee24446bcba92f6d66288e19f73a7b2f613"} Dec 04 15:27:48 crc kubenswrapper[4946]: I1204 15:27:48.997841 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:27:48 crc kubenswrapper[4946]: I1204 15:27:48.998541 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerName="ceilometer-central-agent" containerID="cri-o://6f2514cd25ee648dce8d435a0f300ecbf3949eaa715a9ae7aeed889d136d53dd" gracePeriod=30 Dec 04 15:27:48 crc kubenswrapper[4946]: I1204 15:27:48.998821 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerName="proxy-httpd" containerID="cri-o://f523307d41279f5eac58293824890658feb3ea5f6f534e3affe5e0962b8c2267" gracePeriod=30 Dec 04 15:27:48 crc kubenswrapper[4946]: I1204 15:27:48.999048 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerName="ceilometer-notification-agent" containerID="cri-o://1b2e09a2b75199b84e8b68ab565afce2c1bd6b0301f2fc0e34280099347a4b46" gracePeriod=30 Dec 04 15:27:48 crc kubenswrapper[4946]: I1204 15:27:48.999099 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerName="sg-core" containerID="cri-o://074575e82961cbf82b2a18524211c93c9f115c3ee2fcf3818684c001b7b49357" gracePeriod=30 Dec 04 15:27:49 crc kubenswrapper[4946]: I1204 15:27:49.477501 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f81b9295-1cdf-44a6-afef-1380c1e3cf54" path="/var/lib/kubelet/pods/f81b9295-1cdf-44a6-afef-1380c1e3cf54/volumes" Dec 04 15:27:49 crc kubenswrapper[4946]: I1204 15:27:49.571049 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7bb3c93e-3400-4b38-bc6d-733a1d345435","Type":"ContainerStarted","Data":"cfb5357c6eba71f3216b7030730b2677a936153cd77dfce5ae93fdb4b50cfa30"} Dec 04 15:27:49 crc kubenswrapper[4946]: I1204 15:27:49.571151 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 04 15:27:49 crc kubenswrapper[4946]: I1204 15:27:49.574984 4946 generic.go:334] "Generic (PLEG): container finished" podID="f81e4d9b-578a-4656-ac39-a36738ae194f" 
containerID="f523307d41279f5eac58293824890658feb3ea5f6f534e3affe5e0962b8c2267" exitCode=0 Dec 04 15:27:49 crc kubenswrapper[4946]: I1204 15:27:49.575012 4946 generic.go:334] "Generic (PLEG): container finished" podID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerID="074575e82961cbf82b2a18524211c93c9f115c3ee2fcf3818684c001b7b49357" exitCode=2 Dec 04 15:27:49 crc kubenswrapper[4946]: I1204 15:27:49.575028 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f81e4d9b-578a-4656-ac39-a36738ae194f","Type":"ContainerDied","Data":"f523307d41279f5eac58293824890658feb3ea5f6f534e3affe5e0962b8c2267"} Dec 04 15:27:49 crc kubenswrapper[4946]: I1204 15:27:49.575044 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f81e4d9b-578a-4656-ac39-a36738ae194f","Type":"ContainerDied","Data":"074575e82961cbf82b2a18524211c93c9f115c3ee2fcf3818684c001b7b49357"} Dec 04 15:27:49 crc kubenswrapper[4946]: I1204 15:27:49.594363 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.178597042 podStartE2EDuration="2.594346497s" podCreationTimestamp="2025-12-04 15:27:47 +0000 UTC" firstStartedPulling="2025-12-04 15:27:48.497934156 +0000 UTC m=+1519.383977797" lastFinishedPulling="2025-12-04 15:27:48.913683611 +0000 UTC m=+1519.799727252" observedRunningTime="2025-12-04 15:27:49.59074894 +0000 UTC m=+1520.476792581" watchObservedRunningTime="2025-12-04 15:27:49.594346497 +0000 UTC m=+1520.480390138" Dec 04 15:27:50 crc kubenswrapper[4946]: I1204 15:27:50.590868 4946 generic.go:334] "Generic (PLEG): container finished" podID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerID="1b2e09a2b75199b84e8b68ab565afce2c1bd6b0301f2fc0e34280099347a4b46" exitCode=0 Dec 04 15:27:50 crc kubenswrapper[4946]: I1204 15:27:50.591329 4946 generic.go:334] "Generic (PLEG): container finished" podID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerID="6f2514cd25ee648dce8d435a0f300ecbf3949eaa715a9ae7aeed889d136d53dd" exitCode=0 Dec 04 15:27:50 crc kubenswrapper[4946]: I1204 15:27:50.590940 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f81e4d9b-578a-4656-ac39-a36738ae194f","Type":"ContainerDied","Data":"1b2e09a2b75199b84e8b68ab565afce2c1bd6b0301f2fc0e34280099347a4b46"} Dec 04 15:27:50 crc kubenswrapper[4946]: I1204 15:27:50.591452 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f81e4d9b-578a-4656-ac39-a36738ae194f","Type":"ContainerDied","Data":"6f2514cd25ee648dce8d435a0f300ecbf3949eaa715a9ae7aeed889d136d53dd"} Dec 04 15:27:50 crc kubenswrapper[4946]: I1204 15:27:50.996723 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.142382 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rcp6\" (UniqueName: \"kubernetes.io/projected/f81e4d9b-578a-4656-ac39-a36738ae194f-kube-api-access-6rcp6\") pod \"f81e4d9b-578a-4656-ac39-a36738ae194f\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.142550 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-config-data\") pod \"f81e4d9b-578a-4656-ac39-a36738ae194f\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.142591 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-sg-core-conf-yaml\") pod \"f81e4d9b-578a-4656-ac39-a36738ae194f\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.142629 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-scripts\") pod \"f81e4d9b-578a-4656-ac39-a36738ae194f\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.142676 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f81e4d9b-578a-4656-ac39-a36738ae194f-run-httpd\") pod \"f81e4d9b-578a-4656-ac39-a36738ae194f\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.142728 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-combined-ca-bundle\") pod \"f81e4d9b-578a-4656-ac39-a36738ae194f\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.142758 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f81e4d9b-578a-4656-ac39-a36738ae194f-log-httpd\") pod \"f81e4d9b-578a-4656-ac39-a36738ae194f\" (UID: \"f81e4d9b-578a-4656-ac39-a36738ae194f\") " Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.143830 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f81e4d9b-578a-4656-ac39-a36738ae194f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f81e4d9b-578a-4656-ac39-a36738ae194f" (UID: "f81e4d9b-578a-4656-ac39-a36738ae194f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.144162 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f81e4d9b-578a-4656-ac39-a36738ae194f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f81e4d9b-578a-4656-ac39-a36738ae194f" (UID: "f81e4d9b-578a-4656-ac39-a36738ae194f"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.150062 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f81e4d9b-578a-4656-ac39-a36738ae194f-kube-api-access-6rcp6" (OuterVolumeSpecName: "kube-api-access-6rcp6") pod "f81e4d9b-578a-4656-ac39-a36738ae194f" (UID: "f81e4d9b-578a-4656-ac39-a36738ae194f"). InnerVolumeSpecName "kube-api-access-6rcp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.150523 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-scripts" (OuterVolumeSpecName: "scripts") pod "f81e4d9b-578a-4656-ac39-a36738ae194f" (UID: "f81e4d9b-578a-4656-ac39-a36738ae194f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.180135 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f81e4d9b-578a-4656-ac39-a36738ae194f" (UID: "f81e4d9b-578a-4656-ac39-a36738ae194f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.233936 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f81e4d9b-578a-4656-ac39-a36738ae194f" (UID: "f81e4d9b-578a-4656-ac39-a36738ae194f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.244910 4946 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f81e4d9b-578a-4656-ac39-a36738ae194f-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.244956 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rcp6\" (UniqueName: \"kubernetes.io/projected/f81e4d9b-578a-4656-ac39-a36738ae194f-kube-api-access-6rcp6\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.244971 4946 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.244983 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.244995 4946 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f81e4d9b-578a-4656-ac39-a36738ae194f-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.245007 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.270339 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-config-data" (OuterVolumeSpecName: "config-data") pod "f81e4d9b-578a-4656-ac39-a36738ae194f" (UID: "f81e4d9b-578a-4656-ac39-a36738ae194f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.347287 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f81e4d9b-578a-4656-ac39-a36738ae194f-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.610192 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f81e4d9b-578a-4656-ac39-a36738ae194f","Type":"ContainerDied","Data":"511ee1cca42490ec636ab9a670ab987198efa3a13633c0752eabf77bbdda769c"} Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.610249 4946 scope.go:117] "RemoveContainer" containerID="f523307d41279f5eac58293824890658feb3ea5f6f534e3affe5e0962b8c2267" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.610265 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.638059 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.643966 4946 scope.go:117] "RemoveContainer" containerID="074575e82961cbf82b2a18524211c93c9f115c3ee2fcf3818684c001b7b49357" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.648646 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.662358 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:27:51 crc kubenswrapper[4946]: E1204 15:27:51.662795 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerName="ceilometer-notification-agent" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.662816 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerName="ceilometer-notification-agent" Dec 04 15:27:51 crc kubenswrapper[4946]: E1204 15:27:51.662835 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerName="sg-core" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.662842 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerName="sg-core" Dec 04 15:27:51 crc kubenswrapper[4946]: E1204 15:27:51.662863 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerName="proxy-httpd" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.662868 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerName="proxy-httpd" Dec 04 15:27:51 crc kubenswrapper[4946]: E1204 15:27:51.662877 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerName="ceilometer-central-agent" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.662884 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerName="ceilometer-central-agent" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.663093 4946 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerName="ceilometer-notification-agent" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.663140 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerName="ceilometer-central-agent" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.663153 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerName="proxy-httpd" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.663171 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f81e4d9b-578a-4656-ac39-a36738ae194f" containerName="sg-core" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.665917 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.678873 4946 scope.go:117] "RemoveContainer" containerID="1b2e09a2b75199b84e8b68ab565afce2c1bd6b0301f2fc0e34280099347a4b46" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.679261 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.679279 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.679579 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.693038 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.724901 4946 scope.go:117] "RemoveContainer" containerID="6f2514cd25ee648dce8d435a0f300ecbf3949eaa715a9ae7aeed889d136d53dd" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.757511 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.757562 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-config-data\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.757586 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d63caa-e471-453a-b2f4-7c09bd2b451e-run-httpd\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.757610 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d63caa-e471-453a-b2f4-7c09bd2b451e-log-httpd\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.757633 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-scripts\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.757697 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n28n6\" (UniqueName: \"kubernetes.io/projected/a7d63caa-e471-453a-b2f4-7c09bd2b451e-kube-api-access-n28n6\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.758043 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.758174 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.860286 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.861040 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.861257 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.861282 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-config-data\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.861313 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d63caa-e471-453a-b2f4-7c09bd2b451e-run-httpd\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.861347 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d63caa-e471-453a-b2f4-7c09bd2b451e-log-httpd\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.861372 4946 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-scripts\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.861407 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n28n6\" (UniqueName: \"kubernetes.io/projected/a7d63caa-e471-453a-b2f4-7c09bd2b451e-kube-api-access-n28n6\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.861980 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d63caa-e471-453a-b2f4-7c09bd2b451e-run-httpd\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.862032 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d63caa-e471-453a-b2f4-7c09bd2b451e-log-httpd\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.864755 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.865382 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-scripts\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.865572 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-config-data\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.866223 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.866537 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.886486 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n28n6\" (UniqueName: \"kubernetes.io/projected/a7d63caa-e471-453a-b2f4-7c09bd2b451e-kube-api-access-n28n6\") pod \"ceilometer-0\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " pod="openstack/ceilometer-0" Dec 04 15:27:51 crc kubenswrapper[4946]: I1204 15:27:51.994791 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:27:52 crc kubenswrapper[4946]: I1204 15:27:52.495025 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:27:52 crc kubenswrapper[4946]: W1204 15:27:52.514551 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7d63caa_e471_453a_b2f4_7c09bd2b451e.slice/crio-ce953529618156c763198a8801325383f126887d4a0f3503726c36fa82a1cb7f WatchSource:0}: Error finding container ce953529618156c763198a8801325383f126887d4a0f3503726c36fa82a1cb7f: Status 404 returned error can't find the container with id ce953529618156c763198a8801325383f126887d4a0f3503726c36fa82a1cb7f Dec 04 15:27:52 crc kubenswrapper[4946]: I1204 15:27:52.621733 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7d63caa-e471-453a-b2f4-7c09bd2b451e","Type":"ContainerStarted","Data":"ce953529618156c763198a8801325383f126887d4a0f3503726c36fa82a1cb7f"} Dec 04 15:27:53 crc kubenswrapper[4946]: I1204 15:27:53.494989 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f81e4d9b-578a-4656-ac39-a36738ae194f" path="/var/lib/kubelet/pods/f81e4d9b-578a-4656-ac39-a36738ae194f/volumes" Dec 04 15:27:53 crc kubenswrapper[4946]: I1204 15:27:53.657415 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7d63caa-e471-453a-b2f4-7c09bd2b451e","Type":"ContainerStarted","Data":"14befa07830fa576c9114c6337089169bd62960f660ee5a0771ee6fa23ca1ca1"} Dec 04 15:27:53 crc kubenswrapper[4946]: I1204 15:27:53.960273 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 04 15:27:53 crc kubenswrapper[4946]: I1204 15:27:53.964418 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 04 15:27:53 crc kubenswrapper[4946]: I1204 15:27:53.968325 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 04 15:27:54 crc kubenswrapper[4946]: I1204 15:27:54.680040 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7d63caa-e471-453a-b2f4-7c09bd2b451e","Type":"ContainerStarted","Data":"68fdd3bfda606c140c2c9ac092bb0c181ab26f65330b27a6493557866a421603"} Dec 04 15:27:54 crc kubenswrapper[4946]: I1204 15:27:54.690455 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.535536 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.554552 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtzp8\" (UniqueName: \"kubernetes.io/projected/b01d659d-7246-4db4-bce4-1d81adc7bb5b-kube-api-access-jtzp8\") pod \"b01d659d-7246-4db4-bce4-1d81adc7bb5b\" (UID: \"b01d659d-7246-4db4-bce4-1d81adc7bb5b\") " Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.554685 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b01d659d-7246-4db4-bce4-1d81adc7bb5b-config-data\") pod \"b01d659d-7246-4db4-bce4-1d81adc7bb5b\" (UID: \"b01d659d-7246-4db4-bce4-1d81adc7bb5b\") " Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.554844 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b01d659d-7246-4db4-bce4-1d81adc7bb5b-combined-ca-bundle\") pod \"b01d659d-7246-4db4-bce4-1d81adc7bb5b\" (UID: \"b01d659d-7246-4db4-bce4-1d81adc7bb5b\") " Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.560451 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b01d659d-7246-4db4-bce4-1d81adc7bb5b-kube-api-access-jtzp8" (OuterVolumeSpecName: "kube-api-access-jtzp8") pod "b01d659d-7246-4db4-bce4-1d81adc7bb5b" (UID: "b01d659d-7246-4db4-bce4-1d81adc7bb5b"). InnerVolumeSpecName "kube-api-access-jtzp8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.599351 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b01d659d-7246-4db4-bce4-1d81adc7bb5b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b01d659d-7246-4db4-bce4-1d81adc7bb5b" (UID: "b01d659d-7246-4db4-bce4-1d81adc7bb5b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.607237 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b01d659d-7246-4db4-bce4-1d81adc7bb5b-config-data" (OuterVolumeSpecName: "config-data") pod "b01d659d-7246-4db4-bce4-1d81adc7bb5b" (UID: "b01d659d-7246-4db4-bce4-1d81adc7bb5b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.657607 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b01d659d-7246-4db4-bce4-1d81adc7bb5b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.658201 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtzp8\" (UniqueName: \"kubernetes.io/projected/b01d659d-7246-4db4-bce4-1d81adc7bb5b-kube-api-access-jtzp8\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.658296 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b01d659d-7246-4db4-bce4-1d81adc7bb5b-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.691564 4946 generic.go:334] "Generic (PLEG): container finished" podID="b01d659d-7246-4db4-bce4-1d81adc7bb5b" containerID="ff23d4a30ecb5bdfbbd8cae6703210287ebb2d2a07eae5d5ba9b1883bdf642c3" exitCode=137 Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.691629 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.691634 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b01d659d-7246-4db4-bce4-1d81adc7bb5b","Type":"ContainerDied","Data":"ff23d4a30ecb5bdfbbd8cae6703210287ebb2d2a07eae5d5ba9b1883bdf642c3"} Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.691820 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b01d659d-7246-4db4-bce4-1d81adc7bb5b","Type":"ContainerDied","Data":"e4d6f1789d0bad302dfd8fbd50c34dfaef4ddbf2900f93051acf09a35ee735e5"} Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.691867 4946 scope.go:117] "RemoveContainer" containerID="ff23d4a30ecb5bdfbbd8cae6703210287ebb2d2a07eae5d5ba9b1883bdf642c3" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.695790 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7d63caa-e471-453a-b2f4-7c09bd2b451e","Type":"ContainerStarted","Data":"90cf25673fe2cb94603e09dcc13ca04006aceca7550f2ce25eb72ced120c8b9d"} Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.722712 4946 scope.go:117] "RemoveContainer" containerID="ff23d4a30ecb5bdfbbd8cae6703210287ebb2d2a07eae5d5ba9b1883bdf642c3" Dec 04 15:27:55 crc kubenswrapper[4946]: E1204 15:27:55.723368 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff23d4a30ecb5bdfbbd8cae6703210287ebb2d2a07eae5d5ba9b1883bdf642c3\": container with ID starting with ff23d4a30ecb5bdfbbd8cae6703210287ebb2d2a07eae5d5ba9b1883bdf642c3 not found: ID does not exist" containerID="ff23d4a30ecb5bdfbbd8cae6703210287ebb2d2a07eae5d5ba9b1883bdf642c3" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.723423 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff23d4a30ecb5bdfbbd8cae6703210287ebb2d2a07eae5d5ba9b1883bdf642c3"} err="failed to get container status \"ff23d4a30ecb5bdfbbd8cae6703210287ebb2d2a07eae5d5ba9b1883bdf642c3\": rpc error: code = NotFound desc = could not find container \"ff23d4a30ecb5bdfbbd8cae6703210287ebb2d2a07eae5d5ba9b1883bdf642c3\": container with ID starting with 
ff23d4a30ecb5bdfbbd8cae6703210287ebb2d2a07eae5d5ba9b1883bdf642c3 not found: ID does not exist" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.736529 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.748468 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.761687 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 04 15:27:55 crc kubenswrapper[4946]: E1204 15:27:55.762199 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b01d659d-7246-4db4-bce4-1d81adc7bb5b" containerName="nova-cell1-novncproxy-novncproxy" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.762223 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="b01d659d-7246-4db4-bce4-1d81adc7bb5b" containerName="nova-cell1-novncproxy-novncproxy" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.762480 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="b01d659d-7246-4db4-bce4-1d81adc7bb5b" containerName="nova-cell1-novncproxy-novncproxy" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.763275 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.766396 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.766874 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.767101 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.776771 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.862843 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f2624be-b71d-475e-a895-515905f6ef24-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2f2624be-b71d-475e-a895-515905f6ef24\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.863093 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f2624be-b71d-475e-a895-515905f6ef24-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"2f2624be-b71d-475e-a895-515905f6ef24\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.863159 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f2624be-b71d-475e-a895-515905f6ef24-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2f2624be-b71d-475e-a895-515905f6ef24\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.863193 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4srf\" (UniqueName: 
\"kubernetes.io/projected/2f2624be-b71d-475e-a895-515905f6ef24-kube-api-access-h4srf\") pod \"nova-cell1-novncproxy-0\" (UID: \"2f2624be-b71d-475e-a895-515905f6ef24\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.863277 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f2624be-b71d-475e-a895-515905f6ef24-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"2f2624be-b71d-475e-a895-515905f6ef24\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.965053 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f2624be-b71d-475e-a895-515905f6ef24-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"2f2624be-b71d-475e-a895-515905f6ef24\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.965147 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f2624be-b71d-475e-a895-515905f6ef24-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2f2624be-b71d-475e-a895-515905f6ef24\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.965179 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4srf\" (UniqueName: \"kubernetes.io/projected/2f2624be-b71d-475e-a895-515905f6ef24-kube-api-access-h4srf\") pod \"nova-cell1-novncproxy-0\" (UID: \"2f2624be-b71d-475e-a895-515905f6ef24\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.965239 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f2624be-b71d-475e-a895-515905f6ef24-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"2f2624be-b71d-475e-a895-515905f6ef24\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.965382 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f2624be-b71d-475e-a895-515905f6ef24-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2f2624be-b71d-475e-a895-515905f6ef24\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.970575 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f2624be-b71d-475e-a895-515905f6ef24-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2f2624be-b71d-475e-a895-515905f6ef24\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.971286 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f2624be-b71d-475e-a895-515905f6ef24-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2f2624be-b71d-475e-a895-515905f6ef24\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.971860 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f2624be-b71d-475e-a895-515905f6ef24-config-data\") pod 
\"nova-cell1-novncproxy-0\" (UID: \"2f2624be-b71d-475e-a895-515905f6ef24\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.974772 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f2624be-b71d-475e-a895-515905f6ef24-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"2f2624be-b71d-475e-a895-515905f6ef24\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:55 crc kubenswrapper[4946]: I1204 15:27:55.987029 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4srf\" (UniqueName: \"kubernetes.io/projected/2f2624be-b71d-475e-a895-515905f6ef24-kube-api-access-h4srf\") pod \"nova-cell1-novncproxy-0\" (UID: \"2f2624be-b71d-475e-a895-515905f6ef24\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:56 crc kubenswrapper[4946]: I1204 15:27:56.121630 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:27:56 crc kubenswrapper[4946]: I1204 15:27:56.629303 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 04 15:27:56 crc kubenswrapper[4946]: W1204 15:27:56.635151 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2f2624be_b71d_475e_a895_515905f6ef24.slice/crio-88d0f66e049ee9a73f628bfb8b77d01a60ff18291882b9ed231164ea9b7f23b8 WatchSource:0}: Error finding container 88d0f66e049ee9a73f628bfb8b77d01a60ff18291882b9ed231164ea9b7f23b8: Status 404 returned error can't find the container with id 88d0f66e049ee9a73f628bfb8b77d01a60ff18291882b9ed231164ea9b7f23b8 Dec 04 15:27:56 crc kubenswrapper[4946]: I1204 15:27:56.709977 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"2f2624be-b71d-475e-a895-515905f6ef24","Type":"ContainerStarted","Data":"88d0f66e049ee9a73f628bfb8b77d01a60ff18291882b9ed231164ea9b7f23b8"} Dec 04 15:27:56 crc kubenswrapper[4946]: I1204 15:27:56.791761 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 04 15:27:56 crc kubenswrapper[4946]: I1204 15:27:56.792973 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 04 15:27:56 crc kubenswrapper[4946]: I1204 15:27:56.806140 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 04 15:27:56 crc kubenswrapper[4946]: I1204 15:27:56.810047 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 04 15:27:57 crc kubenswrapper[4946]: I1204 15:27:57.481516 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b01d659d-7246-4db4-bce4-1d81adc7bb5b" path="/var/lib/kubelet/pods/b01d659d-7246-4db4-bce4-1d81adc7bb5b/volumes" Dec 04 15:27:57 crc kubenswrapper[4946]: I1204 15:27:57.732303 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"2f2624be-b71d-475e-a895-515905f6ef24","Type":"ContainerStarted","Data":"b6e15ac0e5f078836365987ea95b382eb042655dbcd3248462f3fd68d90849cf"} Dec 04 15:27:57 crc kubenswrapper[4946]: I1204 15:27:57.734472 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 04 15:27:57 crc kubenswrapper[4946]: I1204 15:27:57.736795 4946 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.040403 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.041732 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-89fh5"] Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.046889 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.148702 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-ovsdbserver-sb\") pod \"dnsmasq-dns-5fd9b586ff-89fh5\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.149303 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-dns-svc\") pod \"dnsmasq-dns-5fd9b586ff-89fh5\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.149352 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-dns-swift-storage-0\") pod \"dnsmasq-dns-5fd9b586ff-89fh5\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.149418 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-config\") pod \"dnsmasq-dns-5fd9b586ff-89fh5\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.149475 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f59l\" (UniqueName: \"kubernetes.io/projected/f6f0046a-c978-4cf7-8199-4617162c1d5f-kube-api-access-8f59l\") pod \"dnsmasq-dns-5fd9b586ff-89fh5\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.149534 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-ovsdbserver-nb\") pod \"dnsmasq-dns-5fd9b586ff-89fh5\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.165982 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-89fh5"] Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.264401 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-dns-svc\") pod \"dnsmasq-dns-5fd9b586ff-89fh5\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " 
pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.264492 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-dns-swift-storage-0\") pod \"dnsmasq-dns-5fd9b586ff-89fh5\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.264585 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-config\") pod \"dnsmasq-dns-5fd9b586ff-89fh5\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.264668 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f59l\" (UniqueName: \"kubernetes.io/projected/f6f0046a-c978-4cf7-8199-4617162c1d5f-kube-api-access-8f59l\") pod \"dnsmasq-dns-5fd9b586ff-89fh5\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.264744 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-ovsdbserver-nb\") pod \"dnsmasq-dns-5fd9b586ff-89fh5\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.265108 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-ovsdbserver-sb\") pod \"dnsmasq-dns-5fd9b586ff-89fh5\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.266559 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-ovsdbserver-sb\") pod \"dnsmasq-dns-5fd9b586ff-89fh5\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.267640 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-dns-svc\") pod \"dnsmasq-dns-5fd9b586ff-89fh5\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.268383 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-dns-swift-storage-0\") pod \"dnsmasq-dns-5fd9b586ff-89fh5\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.269139 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-ovsdbserver-nb\") pod \"dnsmasq-dns-5fd9b586ff-89fh5\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc 
kubenswrapper[4946]: I1204 15:27:58.269505 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-config\") pod \"dnsmasq-dns-5fd9b586ff-89fh5\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.351166 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f59l\" (UniqueName: \"kubernetes.io/projected/f6f0046a-c978-4cf7-8199-4617162c1d5f-kube-api-access-8f59l\") pod \"dnsmasq-dns-5fd9b586ff-89fh5\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:58 crc kubenswrapper[4946]: I1204 15:27:58.401082 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:27:59 crc kubenswrapper[4946]: I1204 15:27:59.020465 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=4.020437513 podStartE2EDuration="4.020437513s" podCreationTimestamp="2025-12-04 15:27:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:27:58.778535073 +0000 UTC m=+1529.664578714" watchObservedRunningTime="2025-12-04 15:27:59.020437513 +0000 UTC m=+1529.906481154" Dec 04 15:27:59 crc kubenswrapper[4946]: W1204 15:27:59.036416 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf6f0046a_c978_4cf7_8199_4617162c1d5f.slice/crio-fcd5de62096705b2470faf1b89261c4a70dbe7084b1b2c090bfc560a9be9bc95 WatchSource:0}: Error finding container fcd5de62096705b2470faf1b89261c4a70dbe7084b1b2c090bfc560a9be9bc95: Status 404 returned error can't find the container with id fcd5de62096705b2470faf1b89261c4a70dbe7084b1b2c090bfc560a9be9bc95 Dec 04 15:27:59 crc kubenswrapper[4946]: I1204 15:27:59.038264 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-89fh5"] Dec 04 15:27:59 crc kubenswrapper[4946]: I1204 15:27:59.800141 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7d63caa-e471-453a-b2f4-7c09bd2b451e","Type":"ContainerStarted","Data":"3904d6f1306b8b4e5cec95fc01fc96a3ecf53d792286c7b797199fe787127662"} Dec 04 15:27:59 crc kubenswrapper[4946]: I1204 15:27:59.801703 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 04 15:27:59 crc kubenswrapper[4946]: I1204 15:27:59.804601 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" event={"ID":"f6f0046a-c978-4cf7-8199-4617162c1d5f","Type":"ContainerStarted","Data":"c2e050cd5c56f1e69d3c11e1738360324a7480a825b297ffdba837d12c27cd76"} Dec 04 15:27:59 crc kubenswrapper[4946]: I1204 15:27:59.804770 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" event={"ID":"f6f0046a-c978-4cf7-8199-4617162c1d5f","Type":"ContainerStarted","Data":"fcd5de62096705b2470faf1b89261c4a70dbe7084b1b2c090bfc560a9be9bc95"} Dec 04 15:27:59 crc kubenswrapper[4946]: I1204 15:27:59.845189 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.286105948 podStartE2EDuration="8.84517157s" podCreationTimestamp="2025-12-04 
15:27:51 +0000 UTC" firstStartedPulling="2025-12-04 15:27:52.517284838 +0000 UTC m=+1523.403328479" lastFinishedPulling="2025-12-04 15:27:59.07635046 +0000 UTC m=+1529.962394101" observedRunningTime="2025-12-04 15:27:59.830892036 +0000 UTC m=+1530.716935677" watchObservedRunningTime="2025-12-04 15:27:59.84517157 +0000 UTC m=+1530.731215211" Dec 04 15:28:00 crc kubenswrapper[4946]: I1204 15:28:00.820705 4946 generic.go:334] "Generic (PLEG): container finished" podID="f6f0046a-c978-4cf7-8199-4617162c1d5f" containerID="c2e050cd5c56f1e69d3c11e1738360324a7480a825b297ffdba837d12c27cd76" exitCode=0 Dec 04 15:28:00 crc kubenswrapper[4946]: I1204 15:28:00.820776 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" event={"ID":"f6f0046a-c978-4cf7-8199-4617162c1d5f","Type":"ContainerDied","Data":"c2e050cd5c56f1e69d3c11e1738360324a7480a825b297ffdba837d12c27cd76"} Dec 04 15:28:01 crc kubenswrapper[4946]: I1204 15:28:01.125605 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:28:01 crc kubenswrapper[4946]: I1204 15:28:01.839436 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" event={"ID":"f6f0046a-c978-4cf7-8199-4617162c1d5f","Type":"ContainerStarted","Data":"148d20654423a284dd08aea4bd2f0d69aa45d5be8719837fcb52476bd4464a22"} Dec 04 15:28:01 crc kubenswrapper[4946]: I1204 15:28:01.841484 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:28:01 crc kubenswrapper[4946]: I1204 15:28:01.868449 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" podStartSLOduration=4.8684266019999995 podStartE2EDuration="4.868426602s" podCreationTimestamp="2025-12-04 15:27:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:28:01.862596065 +0000 UTC m=+1532.748639706" watchObservedRunningTime="2025-12-04 15:28:01.868426602 +0000 UTC m=+1532.754470243" Dec 04 15:28:01 crc kubenswrapper[4946]: I1204 15:28:01.916500 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 04 15:28:01 crc kubenswrapper[4946]: I1204 15:28:01.916774 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="63a28e83-a076-47e9-9072-0c1e196a85df" containerName="nova-api-log" containerID="cri-o://fde34a681586a37d0e9a1830d3932e86f1991b224b6e006c63cdf7809cab9a43" gracePeriod=30 Dec 04 15:28:01 crc kubenswrapper[4946]: I1204 15:28:01.917523 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="63a28e83-a076-47e9-9072-0c1e196a85df" containerName="nova-api-api" containerID="cri-o://22e819d025a85f0a37f05fe5f4a993a1261ff60461b8094a73ccd3ad20fcc207" gracePeriod=30 Dec 04 15:28:02 crc kubenswrapper[4946]: I1204 15:28:02.219017 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:28:02 crc kubenswrapper[4946]: I1204 15:28:02.219357 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerName="ceilometer-central-agent" containerID="cri-o://14befa07830fa576c9114c6337089169bd62960f660ee5a0771ee6fa23ca1ca1" gracePeriod=30 Dec 04 15:28:02 crc kubenswrapper[4946]: I1204 15:28:02.219656 4946 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerName="ceilometer-notification-agent" containerID="cri-o://68fdd3bfda606c140c2c9ac092bb0c181ab26f65330b27a6493557866a421603" gracePeriod=30 Dec 04 15:28:02 crc kubenswrapper[4946]: I1204 15:28:02.219731 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerName="sg-core" containerID="cri-o://90cf25673fe2cb94603e09dcc13ca04006aceca7550f2ce25eb72ced120c8b9d" gracePeriod=30 Dec 04 15:28:02 crc kubenswrapper[4946]: I1204 15:28:02.220049 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerName="proxy-httpd" containerID="cri-o://3904d6f1306b8b4e5cec95fc01fc96a3ecf53d792286c7b797199fe787127662" gracePeriod=30 Dec 04 15:28:02 crc kubenswrapper[4946]: I1204 15:28:02.852233 4946 generic.go:334] "Generic (PLEG): container finished" podID="63a28e83-a076-47e9-9072-0c1e196a85df" containerID="fde34a681586a37d0e9a1830d3932e86f1991b224b6e006c63cdf7809cab9a43" exitCode=143 Dec 04 15:28:02 crc kubenswrapper[4946]: I1204 15:28:02.852417 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"63a28e83-a076-47e9-9072-0c1e196a85df","Type":"ContainerDied","Data":"fde34a681586a37d0e9a1830d3932e86f1991b224b6e006c63cdf7809cab9a43"} Dec 04 15:28:02 crc kubenswrapper[4946]: I1204 15:28:02.857380 4946 generic.go:334] "Generic (PLEG): container finished" podID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerID="3904d6f1306b8b4e5cec95fc01fc96a3ecf53d792286c7b797199fe787127662" exitCode=0 Dec 04 15:28:02 crc kubenswrapper[4946]: I1204 15:28:02.857415 4946 generic.go:334] "Generic (PLEG): container finished" podID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerID="90cf25673fe2cb94603e09dcc13ca04006aceca7550f2ce25eb72ced120c8b9d" exitCode=2 Dec 04 15:28:02 crc kubenswrapper[4946]: I1204 15:28:02.857423 4946 generic.go:334] "Generic (PLEG): container finished" podID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerID="68fdd3bfda606c140c2c9ac092bb0c181ab26f65330b27a6493557866a421603" exitCode=0 Dec 04 15:28:02 crc kubenswrapper[4946]: I1204 15:28:02.857735 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7d63caa-e471-453a-b2f4-7c09bd2b451e","Type":"ContainerDied","Data":"3904d6f1306b8b4e5cec95fc01fc96a3ecf53d792286c7b797199fe787127662"} Dec 04 15:28:02 crc kubenswrapper[4946]: I1204 15:28:02.857850 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7d63caa-e471-453a-b2f4-7c09bd2b451e","Type":"ContainerDied","Data":"90cf25673fe2cb94603e09dcc13ca04006aceca7550f2ce25eb72ced120c8b9d"} Dec 04 15:28:02 crc kubenswrapper[4946]: I1204 15:28:02.857921 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7d63caa-e471-453a-b2f4-7c09bd2b451e","Type":"ContainerDied","Data":"68fdd3bfda606c140c2c9ac092bb0c181ab26f65330b27a6493557866a421603"} Dec 04 15:28:03 crc kubenswrapper[4946]: I1204 15:28:03.929893 4946 generic.go:334] "Generic (PLEG): container finished" podID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerID="14befa07830fa576c9114c6337089169bd62960f660ee5a0771ee6fa23ca1ca1" exitCode=0 Dec 04 15:28:03 crc kubenswrapper[4946]: I1204 15:28:03.930780 4946 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/ceilometer-0" event={"ID":"a7d63caa-e471-453a-b2f4-7c09bd2b451e","Type":"ContainerDied","Data":"14befa07830fa576c9114c6337089169bd62960f660ee5a0771ee6fa23ca1ca1"} Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.155204 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.293165 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d63caa-e471-453a-b2f4-7c09bd2b451e-log-httpd\") pod \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.293227 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-config-data\") pod \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.293323 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-scripts\") pod \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.293443 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-combined-ca-bundle\") pod \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.293494 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d63caa-e471-453a-b2f4-7c09bd2b451e-run-httpd\") pod \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.293519 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-ceilometer-tls-certs\") pod \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.293540 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n28n6\" (UniqueName: \"kubernetes.io/projected/a7d63caa-e471-453a-b2f4-7c09bd2b451e-kube-api-access-n28n6\") pod \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.293627 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-sg-core-conf-yaml\") pod \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\" (UID: \"a7d63caa-e471-453a-b2f4-7c09bd2b451e\") " Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.294288 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7d63caa-e471-453a-b2f4-7c09bd2b451e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a7d63caa-e471-453a-b2f4-7c09bd2b451e" (UID: "a7d63caa-e471-453a-b2f4-7c09bd2b451e"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.295090 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7d63caa-e471-453a-b2f4-7c09bd2b451e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a7d63caa-e471-453a-b2f4-7c09bd2b451e" (UID: "a7d63caa-e471-453a-b2f4-7c09bd2b451e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.300367 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7d63caa-e471-453a-b2f4-7c09bd2b451e-kube-api-access-n28n6" (OuterVolumeSpecName: "kube-api-access-n28n6") pod "a7d63caa-e471-453a-b2f4-7c09bd2b451e" (UID: "a7d63caa-e471-453a-b2f4-7c09bd2b451e"). InnerVolumeSpecName "kube-api-access-n28n6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.301873 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-scripts" (OuterVolumeSpecName: "scripts") pod "a7d63caa-e471-453a-b2f4-7c09bd2b451e" (UID: "a7d63caa-e471-453a-b2f4-7c09bd2b451e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.379252 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a7d63caa-e471-453a-b2f4-7c09bd2b451e" (UID: "a7d63caa-e471-453a-b2f4-7c09bd2b451e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.389782 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "a7d63caa-e471-453a-b2f4-7c09bd2b451e" (UID: "a7d63caa-e471-453a-b2f4-7c09bd2b451e"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.397031 4946 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d63caa-e471-453a-b2f4-7c09bd2b451e-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.397079 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.397091 4946 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d63caa-e471-453a-b2f4-7c09bd2b451e-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.397103 4946 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.397137 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n28n6\" (UniqueName: \"kubernetes.io/projected/a7d63caa-e471-453a-b2f4-7c09bd2b451e-kube-api-access-n28n6\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.397150 4946 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.420917 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a7d63caa-e471-453a-b2f4-7c09bd2b451e" (UID: "a7d63caa-e471-453a-b2f4-7c09bd2b451e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.446660 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-config-data" (OuterVolumeSpecName: "config-data") pod "a7d63caa-e471-453a-b2f4-7c09bd2b451e" (UID: "a7d63caa-e471-453a-b2f4-7c09bd2b451e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.499607 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.499650 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7d63caa-e471-453a-b2f4-7c09bd2b451e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.948420 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7d63caa-e471-453a-b2f4-7c09bd2b451e","Type":"ContainerDied","Data":"ce953529618156c763198a8801325383f126887d4a0f3503726c36fa82a1cb7f"} Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.948484 4946 scope.go:117] "RemoveContainer" containerID="3904d6f1306b8b4e5cec95fc01fc96a3ecf53d792286c7b797199fe787127662" Dec 04 15:28:04 crc kubenswrapper[4946]: I1204 15:28:04.948538 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:04.999078 4946 scope.go:117] "RemoveContainer" containerID="90cf25673fe2cb94603e09dcc13ca04006aceca7550f2ce25eb72ced120c8b9d" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.025608 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.041257 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.053277 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:28:05 crc kubenswrapper[4946]: E1204 15:28:05.054001 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerName="ceilometer-central-agent" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.054035 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerName="ceilometer-central-agent" Dec 04 15:28:05 crc kubenswrapper[4946]: E1204 15:28:05.054069 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerName="sg-core" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.054079 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerName="sg-core" Dec 04 15:28:05 crc kubenswrapper[4946]: E1204 15:28:05.054102 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerName="proxy-httpd" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.054109 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerName="proxy-httpd" Dec 04 15:28:05 crc kubenswrapper[4946]: E1204 15:28:05.054160 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerName="ceilometer-notification-agent" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.054171 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerName="ceilometer-notification-agent" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.054431 4946 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerName="ceilometer-notification-agent" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.054474 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerName="sg-core" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.054498 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerName="proxy-httpd" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.054520 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" containerName="ceilometer-central-agent" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.057285 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.058634 4946 scope.go:117] "RemoveContainer" containerID="68fdd3bfda606c140c2c9ac092bb0c181ab26f65330b27a6493557866a421603" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.061077 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.063445 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.063655 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.066502 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.126266 4946 scope.go:117] "RemoveContainer" containerID="14befa07830fa576c9114c6337089169bd62960f660ee5a0771ee6fa23ca1ca1" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.219060 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-627f2\" (UniqueName: \"kubernetes.io/projected/8fee5427-4b83-40a3-8f2d-765c2237394d-kube-api-access-627f2\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.219372 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-config-data\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.219475 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-scripts\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.219631 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.219709 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8fee5427-4b83-40a3-8f2d-765c2237394d-log-httpd\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.219830 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.219914 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8fee5427-4b83-40a3-8f2d-765c2237394d-run-httpd\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.220004 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.322341 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.323020 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8fee5427-4b83-40a3-8f2d-765c2237394d-log-httpd\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.323268 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.323404 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8fee5427-4b83-40a3-8f2d-765c2237394d-run-httpd\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.323539 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.323664 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-627f2\" (UniqueName: \"kubernetes.io/projected/8fee5427-4b83-40a3-8f2d-765c2237394d-kube-api-access-627f2\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 
15:28:05.323768 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-config-data\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.323887 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-scripts\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.332796 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-scripts\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.354707 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8fee5427-4b83-40a3-8f2d-765c2237394d-run-httpd\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.354760 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8fee5427-4b83-40a3-8f2d-765c2237394d-log-httpd\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.363946 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.366611 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.367329 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.369371 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-config-data\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.374725 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-627f2\" (UniqueName: \"kubernetes.io/projected/8fee5427-4b83-40a3-8f2d-765c2237394d-kube-api-access-627f2\") pod \"ceilometer-0\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.421827 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.472978 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7d63caa-e471-453a-b2f4-7c09bd2b451e" path="/var/lib/kubelet/pods/a7d63caa-e471-453a-b2f4-7c09bd2b451e/volumes" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.705788 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.840202 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqw4r\" (UniqueName: \"kubernetes.io/projected/63a28e83-a076-47e9-9072-0c1e196a85df-kube-api-access-dqw4r\") pod \"63a28e83-a076-47e9-9072-0c1e196a85df\" (UID: \"63a28e83-a076-47e9-9072-0c1e196a85df\") " Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.840378 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63a28e83-a076-47e9-9072-0c1e196a85df-config-data\") pod \"63a28e83-a076-47e9-9072-0c1e196a85df\" (UID: \"63a28e83-a076-47e9-9072-0c1e196a85df\") " Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.840410 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63a28e83-a076-47e9-9072-0c1e196a85df-logs\") pod \"63a28e83-a076-47e9-9072-0c1e196a85df\" (UID: \"63a28e83-a076-47e9-9072-0c1e196a85df\") " Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.840436 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63a28e83-a076-47e9-9072-0c1e196a85df-combined-ca-bundle\") pod \"63a28e83-a076-47e9-9072-0c1e196a85df\" (UID: \"63a28e83-a076-47e9-9072-0c1e196a85df\") " Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.841425 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63a28e83-a076-47e9-9072-0c1e196a85df-logs" (OuterVolumeSpecName: "logs") pod "63a28e83-a076-47e9-9072-0c1e196a85df" (UID: "63a28e83-a076-47e9-9072-0c1e196a85df"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.852404 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63a28e83-a076-47e9-9072-0c1e196a85df-kube-api-access-dqw4r" (OuterVolumeSpecName: "kube-api-access-dqw4r") pod "63a28e83-a076-47e9-9072-0c1e196a85df" (UID: "63a28e83-a076-47e9-9072-0c1e196a85df"). InnerVolumeSpecName "kube-api-access-dqw4r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.896217 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63a28e83-a076-47e9-9072-0c1e196a85df-config-data" (OuterVolumeSpecName: "config-data") pod "63a28e83-a076-47e9-9072-0c1e196a85df" (UID: "63a28e83-a076-47e9-9072-0c1e196a85df"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.924705 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63a28e83-a076-47e9-9072-0c1e196a85df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "63a28e83-a076-47e9-9072-0c1e196a85df" (UID: "63a28e83-a076-47e9-9072-0c1e196a85df"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.946497 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqw4r\" (UniqueName: \"kubernetes.io/projected/63a28e83-a076-47e9-9072-0c1e196a85df-kube-api-access-dqw4r\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.946602 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63a28e83-a076-47e9-9072-0c1e196a85df-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.946613 4946 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63a28e83-a076-47e9-9072-0c1e196a85df-logs\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.946621 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63a28e83-a076-47e9-9072-0c1e196a85df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.951471 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.971982 4946 generic.go:334] "Generic (PLEG): container finished" podID="63a28e83-a076-47e9-9072-0c1e196a85df" containerID="22e819d025a85f0a37f05fe5f4a993a1261ff60461b8094a73ccd3ad20fcc207" exitCode=0 Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.972107 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"63a28e83-a076-47e9-9072-0c1e196a85df","Type":"ContainerDied","Data":"22e819d025a85f0a37f05fe5f4a993a1261ff60461b8094a73ccd3ad20fcc207"} Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.972172 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"63a28e83-a076-47e9-9072-0c1e196a85df","Type":"ContainerDied","Data":"4420b628431bd8e8e0954812e65039c9a23345c10cb1a0c5efc9adf14320ab17"} Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.972189 4946 scope.go:117] "RemoveContainer" containerID="22e819d025a85f0a37f05fe5f4a993a1261ff60461b8094a73ccd3ad20fcc207" Dec 04 15:28:05 crc kubenswrapper[4946]: I1204 15:28:05.972310 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.028184 4946 scope.go:117] "RemoveContainer" containerID="fde34a681586a37d0e9a1830d3932e86f1991b224b6e006c63cdf7809cab9a43" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.041288 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.054079 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.075308 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 04 15:28:06 crc kubenswrapper[4946]: E1204 15:28:06.075932 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63a28e83-a076-47e9-9072-0c1e196a85df" containerName="nova-api-api" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.075956 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="63a28e83-a076-47e9-9072-0c1e196a85df" containerName="nova-api-api" Dec 04 15:28:06 crc kubenswrapper[4946]: E1204 15:28:06.075989 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63a28e83-a076-47e9-9072-0c1e196a85df" containerName="nova-api-log" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.075998 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="63a28e83-a076-47e9-9072-0c1e196a85df" containerName="nova-api-log" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.076259 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="63a28e83-a076-47e9-9072-0c1e196a85df" containerName="nova-api-api" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.076284 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="63a28e83-a076-47e9-9072-0c1e196a85df" containerName="nova-api-log" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.077814 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.081494 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.081970 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.083162 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.088804 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.124498 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.145181 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vt6dh"] Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.145776 4946 scope.go:117] "RemoveContainer" containerID="22e819d025a85f0a37f05fe5f4a993a1261ff60461b8094a73ccd3ad20fcc207" Dec 04 15:28:06 crc kubenswrapper[4946]: E1204 15:28:06.146475 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22e819d025a85f0a37f05fe5f4a993a1261ff60461b8094a73ccd3ad20fcc207\": container with ID starting with 22e819d025a85f0a37f05fe5f4a993a1261ff60461b8094a73ccd3ad20fcc207 not found: ID does not exist" containerID="22e819d025a85f0a37f05fe5f4a993a1261ff60461b8094a73ccd3ad20fcc207" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.146522 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22e819d025a85f0a37f05fe5f4a993a1261ff60461b8094a73ccd3ad20fcc207"} err="failed to get container status \"22e819d025a85f0a37f05fe5f4a993a1261ff60461b8094a73ccd3ad20fcc207\": rpc error: code = NotFound desc = could not find container \"22e819d025a85f0a37f05fe5f4a993a1261ff60461b8094a73ccd3ad20fcc207\": container with ID starting with 22e819d025a85f0a37f05fe5f4a993a1261ff60461b8094a73ccd3ad20fcc207 not found: ID does not exist" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.146556 4946 scope.go:117] "RemoveContainer" containerID="fde34a681586a37d0e9a1830d3932e86f1991b224b6e006c63cdf7809cab9a43" Dec 04 15:28:06 crc kubenswrapper[4946]: E1204 15:28:06.146837 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fde34a681586a37d0e9a1830d3932e86f1991b224b6e006c63cdf7809cab9a43\": container with ID starting with fde34a681586a37d0e9a1830d3932e86f1991b224b6e006c63cdf7809cab9a43 not found: ID does not exist" containerID="fde34a681586a37d0e9a1830d3932e86f1991b224b6e006c63cdf7809cab9a43" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.146864 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fde34a681586a37d0e9a1830d3932e86f1991b224b6e006c63cdf7809cab9a43"} err="failed to get container status \"fde34a681586a37d0e9a1830d3932e86f1991b224b6e006c63cdf7809cab9a43\": rpc error: code = NotFound desc = could not find container \"fde34a681586a37d0e9a1830d3932e86f1991b224b6e006c63cdf7809cab9a43\": container with ID starting with fde34a681586a37d0e9a1830d3932e86f1991b224b6e006c63cdf7809cab9a43 not found: ID does not exist" Dec 04 15:28:06 crc 
kubenswrapper[4946]: I1204 15:28:06.147516 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vt6dh" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.165825 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vt6dh"] Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.174711 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.255249 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.255348 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf28764b-4eae-446c-9140-aeff43c87d8b-logs\") pod \"nova-api-0\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.256192 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbvh9\" (UniqueName: \"kubernetes.io/projected/cf28764b-4eae-446c-9140-aeff43c87d8b-kube-api-access-pbvh9\") pod \"nova-api-0\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.256229 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.256289 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-public-tls-certs\") pod \"nova-api-0\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.256341 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adda160a-dd48-4f0d-97fc-068f671ff3b4-utilities\") pod \"redhat-marketplace-vt6dh\" (UID: \"adda160a-dd48-4f0d-97fc-068f671ff3b4\") " pod="openshift-marketplace/redhat-marketplace-vt6dh" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.256378 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adda160a-dd48-4f0d-97fc-068f671ff3b4-catalog-content\") pod \"redhat-marketplace-vt6dh\" (UID: \"adda160a-dd48-4f0d-97fc-068f671ff3b4\") " pod="openshift-marketplace/redhat-marketplace-vt6dh" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.256505 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-config-data\") pod \"nova-api-0\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " 
pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.256528 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99xdj\" (UniqueName: \"kubernetes.io/projected/adda160a-dd48-4f0d-97fc-068f671ff3b4-kube-api-access-99xdj\") pod \"redhat-marketplace-vt6dh\" (UID: \"adda160a-dd48-4f0d-97fc-068f671ff3b4\") " pod="openshift-marketplace/redhat-marketplace-vt6dh" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.358943 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.359020 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf28764b-4eae-446c-9140-aeff43c87d8b-logs\") pod \"nova-api-0\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.359095 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbvh9\" (UniqueName: \"kubernetes.io/projected/cf28764b-4eae-446c-9140-aeff43c87d8b-kube-api-access-pbvh9\") pod \"nova-api-0\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.359142 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.359188 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-public-tls-certs\") pod \"nova-api-0\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.359233 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adda160a-dd48-4f0d-97fc-068f671ff3b4-utilities\") pod \"redhat-marketplace-vt6dh\" (UID: \"adda160a-dd48-4f0d-97fc-068f671ff3b4\") " pod="openshift-marketplace/redhat-marketplace-vt6dh" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.359264 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adda160a-dd48-4f0d-97fc-068f671ff3b4-catalog-content\") pod \"redhat-marketplace-vt6dh\" (UID: \"adda160a-dd48-4f0d-97fc-068f671ff3b4\") " pod="openshift-marketplace/redhat-marketplace-vt6dh" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.359355 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-config-data\") pod \"nova-api-0\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.359374 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99xdj\" (UniqueName: 
\"kubernetes.io/projected/adda160a-dd48-4f0d-97fc-068f671ff3b4-kube-api-access-99xdj\") pod \"redhat-marketplace-vt6dh\" (UID: \"adda160a-dd48-4f0d-97fc-068f671ff3b4\") " pod="openshift-marketplace/redhat-marketplace-vt6dh" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.359456 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf28764b-4eae-446c-9140-aeff43c87d8b-logs\") pod \"nova-api-0\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.360033 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adda160a-dd48-4f0d-97fc-068f671ff3b4-utilities\") pod \"redhat-marketplace-vt6dh\" (UID: \"adda160a-dd48-4f0d-97fc-068f671ff3b4\") " pod="openshift-marketplace/redhat-marketplace-vt6dh" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.360203 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adda160a-dd48-4f0d-97fc-068f671ff3b4-catalog-content\") pod \"redhat-marketplace-vt6dh\" (UID: \"adda160a-dd48-4f0d-97fc-068f671ff3b4\") " pod="openshift-marketplace/redhat-marketplace-vt6dh" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.366798 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.374747 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-config-data\") pod \"nova-api-0\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.376408 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.379617 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-public-tls-certs\") pod \"nova-api-0\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.379650 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99xdj\" (UniqueName: \"kubernetes.io/projected/adda160a-dd48-4f0d-97fc-068f671ff3b4-kube-api-access-99xdj\") pod \"redhat-marketplace-vt6dh\" (UID: \"adda160a-dd48-4f0d-97fc-068f671ff3b4\") " pod="openshift-marketplace/redhat-marketplace-vt6dh" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.389648 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbvh9\" (UniqueName: \"kubernetes.io/projected/cf28764b-4eae-446c-9140-aeff43c87d8b-kube-api-access-pbvh9\") pod \"nova-api-0\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.435590 4946 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openstack/nova-api-0" Dec 04 15:28:06 crc kubenswrapper[4946]: I1204 15:28:06.467693 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vt6dh" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.010657 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8fee5427-4b83-40a3-8f2d-765c2237394d","Type":"ContainerStarted","Data":"5dc21cd30ab6e7aaa3e80c751080780b58535a66049b86b519eff2b5d4f1ac47"} Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.036239 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.243648 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.270235 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-6fxz5"] Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.274083 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-6fxz5" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.277689 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.278662 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.293873 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-6fxz5"] Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.405637 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/680312de-5b5b-4622-8bf8-c987b2f87a05-scripts\") pod \"nova-cell1-cell-mapping-6fxz5\" (UID: \"680312de-5b5b-4622-8bf8-c987b2f87a05\") " pod="openstack/nova-cell1-cell-mapping-6fxz5" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.405749 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdxw7\" (UniqueName: \"kubernetes.io/projected/680312de-5b5b-4622-8bf8-c987b2f87a05-kube-api-access-pdxw7\") pod \"nova-cell1-cell-mapping-6fxz5\" (UID: \"680312de-5b5b-4622-8bf8-c987b2f87a05\") " pod="openstack/nova-cell1-cell-mapping-6fxz5" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.405788 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/680312de-5b5b-4622-8bf8-c987b2f87a05-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-6fxz5\" (UID: \"680312de-5b5b-4622-8bf8-c987b2f87a05\") " pod="openstack/nova-cell1-cell-mapping-6fxz5" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.405820 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/680312de-5b5b-4622-8bf8-c987b2f87a05-config-data\") pod \"nova-cell1-cell-mapping-6fxz5\" (UID: \"680312de-5b5b-4622-8bf8-c987b2f87a05\") " pod="openstack/nova-cell1-cell-mapping-6fxz5" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.479517 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63a28e83-a076-47e9-9072-0c1e196a85df" 
path="/var/lib/kubelet/pods/63a28e83-a076-47e9-9072-0c1e196a85df/volumes" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.507827 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdxw7\" (UniqueName: \"kubernetes.io/projected/680312de-5b5b-4622-8bf8-c987b2f87a05-kube-api-access-pdxw7\") pod \"nova-cell1-cell-mapping-6fxz5\" (UID: \"680312de-5b5b-4622-8bf8-c987b2f87a05\") " pod="openstack/nova-cell1-cell-mapping-6fxz5" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.507893 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/680312de-5b5b-4622-8bf8-c987b2f87a05-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-6fxz5\" (UID: \"680312de-5b5b-4622-8bf8-c987b2f87a05\") " pod="openstack/nova-cell1-cell-mapping-6fxz5" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.507926 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/680312de-5b5b-4622-8bf8-c987b2f87a05-config-data\") pod \"nova-cell1-cell-mapping-6fxz5\" (UID: \"680312de-5b5b-4622-8bf8-c987b2f87a05\") " pod="openstack/nova-cell1-cell-mapping-6fxz5" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.508065 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/680312de-5b5b-4622-8bf8-c987b2f87a05-scripts\") pod \"nova-cell1-cell-mapping-6fxz5\" (UID: \"680312de-5b5b-4622-8bf8-c987b2f87a05\") " pod="openstack/nova-cell1-cell-mapping-6fxz5" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.518965 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/680312de-5b5b-4622-8bf8-c987b2f87a05-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-6fxz5\" (UID: \"680312de-5b5b-4622-8bf8-c987b2f87a05\") " pod="openstack/nova-cell1-cell-mapping-6fxz5" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.519039 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/680312de-5b5b-4622-8bf8-c987b2f87a05-scripts\") pod \"nova-cell1-cell-mapping-6fxz5\" (UID: \"680312de-5b5b-4622-8bf8-c987b2f87a05\") " pod="openstack/nova-cell1-cell-mapping-6fxz5" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.519312 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/680312de-5b5b-4622-8bf8-c987b2f87a05-config-data\") pod \"nova-cell1-cell-mapping-6fxz5\" (UID: \"680312de-5b5b-4622-8bf8-c987b2f87a05\") " pod="openstack/nova-cell1-cell-mapping-6fxz5" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.543211 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdxw7\" (UniqueName: \"kubernetes.io/projected/680312de-5b5b-4622-8bf8-c987b2f87a05-kube-api-access-pdxw7\") pod \"nova-cell1-cell-mapping-6fxz5\" (UID: \"680312de-5b5b-4622-8bf8-c987b2f87a05\") " pod="openstack/nova-cell1-cell-mapping-6fxz5" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.724733 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-6fxz5" Dec 04 15:28:07 crc kubenswrapper[4946]: I1204 15:28:07.896218 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vt6dh"] Dec 04 15:28:08 crc kubenswrapper[4946]: I1204 15:28:08.050331 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8fee5427-4b83-40a3-8f2d-765c2237394d","Type":"ContainerStarted","Data":"56fed4a121081216984822cb39fd8464c5506d0744654c746283d6e1dcb276f5"} Dec 04 15:28:08 crc kubenswrapper[4946]: I1204 15:28:08.054476 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vt6dh" event={"ID":"adda160a-dd48-4f0d-97fc-068f671ff3b4","Type":"ContainerStarted","Data":"ca3a169753baba017217c78ba940ed0ef56f5090d4bd0ba2e01513fea2fc6262"} Dec 04 15:28:08 crc kubenswrapper[4946]: I1204 15:28:08.099035 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cf28764b-4eae-446c-9140-aeff43c87d8b","Type":"ContainerStarted","Data":"73ccc965b38ad7d7e101b84b369ee89042545cbf2bf9384dd97b3735a7970fde"} Dec 04 15:28:08 crc kubenswrapper[4946]: I1204 15:28:08.099134 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cf28764b-4eae-446c-9140-aeff43c87d8b","Type":"ContainerStarted","Data":"1ee6c06175edcb349b75c1ccd2035b6ced7e2e303925e6ecceb575a40d73e29f"} Dec 04 15:28:08 crc kubenswrapper[4946]: I1204 15:28:08.256838 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-6fxz5"] Dec 04 15:28:08 crc kubenswrapper[4946]: I1204 15:28:08.402269 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:28:08 crc kubenswrapper[4946]: I1204 15:28:08.472442 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-rqn9f"] Dec 04 15:28:08 crc kubenswrapper[4946]: I1204 15:28:08.472696 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-78cd565959-rqn9f" podUID="375837a2-49ae-4f0b-bc7c-a1ba198a8d14" containerName="dnsmasq-dns" containerID="cri-o://51332b4897b78e204a71b8f8333578799325b34c33d0960e53dfa0bf25906353" gracePeriod=10 Dec 04 15:28:08 crc kubenswrapper[4946]: I1204 15:28:08.584090 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-78cd565959-rqn9f" podUID="375837a2-49ae-4f0b-bc7c-a1ba198a8d14" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.214:5353: connect: connection refused" Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.204431 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8fee5427-4b83-40a3-8f2d-765c2237394d","Type":"ContainerStarted","Data":"b0c9717788c6e409a4da344bd7b050702b5e979d1c6cb31e5aeb51e6e92b3de7"} Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.217394 4946 generic.go:334] "Generic (PLEG): container finished" podID="adda160a-dd48-4f0d-97fc-068f671ff3b4" containerID="759908a9d8d9f08af330db6728f473a47c469d20746d5c472988ba9ab87e2cf5" exitCode=0 Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.217685 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vt6dh" event={"ID":"adda160a-dd48-4f0d-97fc-068f671ff3b4","Type":"ContainerDied","Data":"759908a9d8d9f08af330db6728f473a47c469d20746d5c472988ba9ab87e2cf5"} Dec 04 15:28:09 crc 
kubenswrapper[4946]: I1204 15:28:09.239875 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cf28764b-4eae-446c-9140-aeff43c87d8b","Type":"ContainerStarted","Data":"f1db61e12803845e357899a950844c8e333b469d2b00dadb7d8fad216d89d8fa"} Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.272560 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-6fxz5" event={"ID":"680312de-5b5b-4622-8bf8-c987b2f87a05","Type":"ContainerStarted","Data":"bd391085bd9ec385a278d2a8cbf6c58260490215b261651618db61a148b8c2a7"} Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.272610 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-6fxz5" event={"ID":"680312de-5b5b-4622-8bf8-c987b2f87a05","Type":"ContainerStarted","Data":"c7359c7a0f5e1b435d27b7ef8cb0b4c92e57f6263627bef63cbc05a1c78f2313"} Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.312221 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78cd565959-rqn9f" Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.325501 4946 generic.go:334] "Generic (PLEG): container finished" podID="375837a2-49ae-4f0b-bc7c-a1ba198a8d14" containerID="51332b4897b78e204a71b8f8333578799325b34c33d0960e53dfa0bf25906353" exitCode=0 Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.325556 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-rqn9f" event={"ID":"375837a2-49ae-4f0b-bc7c-a1ba198a8d14","Type":"ContainerDied","Data":"51332b4897b78e204a71b8f8333578799325b34c33d0960e53dfa0bf25906353"} Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.325588 4946 scope.go:117] "RemoveContainer" containerID="51332b4897b78e204a71b8f8333578799325b34c33d0960e53dfa0bf25906353" Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.371246 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.371220251 podStartE2EDuration="3.371220251s" podCreationTimestamp="2025-12-04 15:28:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:28:09.311609354 +0000 UTC m=+1540.197652995" watchObservedRunningTime="2025-12-04 15:28:09.371220251 +0000 UTC m=+1540.257263892" Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.385972 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-ovsdbserver-nb\") pod \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.386078 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-dns-swift-storage-0\") pod \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.386133 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvxlr\" (UniqueName: \"kubernetes.io/projected/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-kube-api-access-zvxlr\") pod \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.386150 4946 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-config\") pod \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.386229 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-dns-svc\") pod \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.386252 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-ovsdbserver-sb\") pod \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\" (UID: \"375837a2-49ae-4f0b-bc7c-a1ba198a8d14\") " Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.388045 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-6fxz5" podStartSLOduration=2.388032874 podStartE2EDuration="2.388032874s" podCreationTimestamp="2025-12-04 15:28:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:28:09.371041486 +0000 UTC m=+1540.257085127" watchObservedRunningTime="2025-12-04 15:28:09.388032874 +0000 UTC m=+1540.274076515" Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.500515 4946 scope.go:117] "RemoveContainer" containerID="8c1cd2491db148fde07b36f923c557468a263d10af3e79c9f9f6333e1f3cc6a5" Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.513321 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-kube-api-access-zvxlr" (OuterVolumeSpecName: "kube-api-access-zvxlr") pod "375837a2-49ae-4f0b-bc7c-a1ba198a8d14" (UID: "375837a2-49ae-4f0b-bc7c-a1ba198a8d14"). InnerVolumeSpecName "kube-api-access-zvxlr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.601681 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvxlr\" (UniqueName: \"kubernetes.io/projected/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-kube-api-access-zvxlr\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.619265 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "375837a2-49ae-4f0b-bc7c-a1ba198a8d14" (UID: "375837a2-49ae-4f0b-bc7c-a1ba198a8d14"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.621045 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "375837a2-49ae-4f0b-bc7c-a1ba198a8d14" (UID: "375837a2-49ae-4f0b-bc7c-a1ba198a8d14"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.625873 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "375837a2-49ae-4f0b-bc7c-a1ba198a8d14" (UID: "375837a2-49ae-4f0b-bc7c-a1ba198a8d14"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.633810 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-config" (OuterVolumeSpecName: "config") pod "375837a2-49ae-4f0b-bc7c-a1ba198a8d14" (UID: "375837a2-49ae-4f0b-bc7c-a1ba198a8d14"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.659477 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "375837a2-49ae-4f0b-bc7c-a1ba198a8d14" (UID: "375837a2-49ae-4f0b-bc7c-a1ba198a8d14"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.712457 4946 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.712495 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.712505 4946 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.712514 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:09 crc kubenswrapper[4946]: I1204 15:28:09.712523 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/375837a2-49ae-4f0b-bc7c-a1ba198a8d14-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:10 crc kubenswrapper[4946]: I1204 15:28:10.339732 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8fee5427-4b83-40a3-8f2d-765c2237394d","Type":"ContainerStarted","Data":"ce0ca07982b1a781d7baba4854449f1217660cd72731873e04fa4da365b65edb"} Dec 04 15:28:10 crc kubenswrapper[4946]: I1204 15:28:10.341597 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-rqn9f" event={"ID":"375837a2-49ae-4f0b-bc7c-a1ba198a8d14","Type":"ContainerDied","Data":"34d6e0805680296c5d966aff3983d445b4e8b85a0e2209373f9b36177170019d"} Dec 04 15:28:10 crc kubenswrapper[4946]: I1204 15:28:10.341653 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78cd565959-rqn9f" Dec 04 15:28:10 crc kubenswrapper[4946]: I1204 15:28:10.381518 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-rqn9f"] Dec 04 15:28:10 crc kubenswrapper[4946]: I1204 15:28:10.397575 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-rqn9f"] Dec 04 15:28:11 crc kubenswrapper[4946]: I1204 15:28:11.355529 4946 generic.go:334] "Generic (PLEG): container finished" podID="adda160a-dd48-4f0d-97fc-068f671ff3b4" containerID="d18cc74a941f7178d0381437889f587da0fe59f20be180a5d1962cb224a82e8f" exitCode=0 Dec 04 15:28:11 crc kubenswrapper[4946]: I1204 15:28:11.355581 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vt6dh" event={"ID":"adda160a-dd48-4f0d-97fc-068f671ff3b4","Type":"ContainerDied","Data":"d18cc74a941f7178d0381437889f587da0fe59f20be180a5d1962cb224a82e8f"} Dec 04 15:28:11 crc kubenswrapper[4946]: I1204 15:28:11.465219 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="375837a2-49ae-4f0b-bc7c-a1ba198a8d14" path="/var/lib/kubelet/pods/375837a2-49ae-4f0b-bc7c-a1ba198a8d14/volumes" Dec 04 15:28:12 crc kubenswrapper[4946]: I1204 15:28:12.373482 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8fee5427-4b83-40a3-8f2d-765c2237394d","Type":"ContainerStarted","Data":"ad87e480f6111b4ebe14e9167cd01f53e10105da43fd1dbaa73d6a05bd1006a8"} Dec 04 15:28:12 crc kubenswrapper[4946]: I1204 15:28:12.376962 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 04 15:28:12 crc kubenswrapper[4946]: I1204 15:28:12.441952 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.623998856 podStartE2EDuration="7.441917073s" podCreationTimestamp="2025-12-04 15:28:05 +0000 UTC" firstStartedPulling="2025-12-04 15:28:05.954744108 +0000 UTC m=+1536.840787749" lastFinishedPulling="2025-12-04 15:28:11.772662315 +0000 UTC m=+1542.658705966" observedRunningTime="2025-12-04 15:28:12.420059284 +0000 UTC m=+1543.306102945" watchObservedRunningTime="2025-12-04 15:28:12.441917073 +0000 UTC m=+1543.327960714" Dec 04 15:28:13 crc kubenswrapper[4946]: I1204 15:28:13.388556 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vt6dh" event={"ID":"adda160a-dd48-4f0d-97fc-068f671ff3b4","Type":"ContainerStarted","Data":"007788b9913c0b64a99471a0b53537c075b82575ff22943222d2d6c0c190ef98"} Dec 04 15:28:13 crc kubenswrapper[4946]: I1204 15:28:13.413335 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vt6dh" podStartSLOduration=4.427327745 podStartE2EDuration="7.413310895s" podCreationTimestamp="2025-12-04 15:28:06 +0000 UTC" firstStartedPulling="2025-12-04 15:28:09.222314237 +0000 UTC m=+1540.108357878" lastFinishedPulling="2025-12-04 15:28:12.208297387 +0000 UTC m=+1543.094341028" observedRunningTime="2025-12-04 15:28:13.406771069 +0000 UTC m=+1544.292814720" watchObservedRunningTime="2025-12-04 15:28:13.413310895 +0000 UTC m=+1544.299354536" Dec 04 15:28:15 crc kubenswrapper[4946]: I1204 15:28:15.414278 4946 generic.go:334] "Generic (PLEG): container finished" podID="680312de-5b5b-4622-8bf8-c987b2f87a05" containerID="bd391085bd9ec385a278d2a8cbf6c58260490215b261651618db61a148b8c2a7" exitCode=0 Dec 04 15:28:15 crc kubenswrapper[4946]: I1204 
15:28:15.414393 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-6fxz5" event={"ID":"680312de-5b5b-4622-8bf8-c987b2f87a05","Type":"ContainerDied","Data":"bd391085bd9ec385a278d2a8cbf6c58260490215b261651618db61a148b8c2a7"} Dec 04 15:28:16 crc kubenswrapper[4946]: I1204 15:28:16.436925 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 04 15:28:16 crc kubenswrapper[4946]: I1204 15:28:16.437219 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 04 15:28:16 crc kubenswrapper[4946]: I1204 15:28:16.468838 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vt6dh" Dec 04 15:28:16 crc kubenswrapper[4946]: I1204 15:28:16.468883 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vt6dh" Dec 04 15:28:16 crc kubenswrapper[4946]: I1204 15:28:16.526955 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vt6dh" Dec 04 15:28:16 crc kubenswrapper[4946]: I1204 15:28:16.969657 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-6fxz5" Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.104864 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/680312de-5b5b-4622-8bf8-c987b2f87a05-combined-ca-bundle\") pod \"680312de-5b5b-4622-8bf8-c987b2f87a05\" (UID: \"680312de-5b5b-4622-8bf8-c987b2f87a05\") " Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.105324 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/680312de-5b5b-4622-8bf8-c987b2f87a05-scripts\") pod \"680312de-5b5b-4622-8bf8-c987b2f87a05\" (UID: \"680312de-5b5b-4622-8bf8-c987b2f87a05\") " Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.105495 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/680312de-5b5b-4622-8bf8-c987b2f87a05-config-data\") pod \"680312de-5b5b-4622-8bf8-c987b2f87a05\" (UID: \"680312de-5b5b-4622-8bf8-c987b2f87a05\") " Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.105633 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdxw7\" (UniqueName: \"kubernetes.io/projected/680312de-5b5b-4622-8bf8-c987b2f87a05-kube-api-access-pdxw7\") pod \"680312de-5b5b-4622-8bf8-c987b2f87a05\" (UID: \"680312de-5b5b-4622-8bf8-c987b2f87a05\") " Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.145978 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/680312de-5b5b-4622-8bf8-c987b2f87a05-kube-api-access-pdxw7" (OuterVolumeSpecName: "kube-api-access-pdxw7") pod "680312de-5b5b-4622-8bf8-c987b2f87a05" (UID: "680312de-5b5b-4622-8bf8-c987b2f87a05"). InnerVolumeSpecName "kube-api-access-pdxw7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.146418 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/680312de-5b5b-4622-8bf8-c987b2f87a05-scripts" (OuterVolumeSpecName: "scripts") pod "680312de-5b5b-4622-8bf8-c987b2f87a05" (UID: "680312de-5b5b-4622-8bf8-c987b2f87a05"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.180345 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/680312de-5b5b-4622-8bf8-c987b2f87a05-config-data" (OuterVolumeSpecName: "config-data") pod "680312de-5b5b-4622-8bf8-c987b2f87a05" (UID: "680312de-5b5b-4622-8bf8-c987b2f87a05"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.186621 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/680312de-5b5b-4622-8bf8-c987b2f87a05-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "680312de-5b5b-4622-8bf8-c987b2f87a05" (UID: "680312de-5b5b-4622-8bf8-c987b2f87a05"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.210875 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/680312de-5b5b-4622-8bf8-c987b2f87a05-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.210932 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/680312de-5b5b-4622-8bf8-c987b2f87a05-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.210948 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdxw7\" (UniqueName: \"kubernetes.io/projected/680312de-5b5b-4622-8bf8-c987b2f87a05-kube-api-access-pdxw7\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.210962 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/680312de-5b5b-4622-8bf8-c987b2f87a05-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.446633 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-6fxz5" event={"ID":"680312de-5b5b-4622-8bf8-c987b2f87a05","Type":"ContainerDied","Data":"c7359c7a0f5e1b435d27b7ef8cb0b4c92e57f6263627bef63cbc05a1c78f2313"} Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.446677 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7359c7a0f5e1b435d27b7ef8cb0b4c92e57f6263627bef63cbc05a1c78f2313" Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.446777 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-6fxz5" Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.458342 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="cf28764b-4eae-446c-9140-aeff43c87d8b" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.226:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.458706 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="cf28764b-4eae-446c-9140-aeff43c87d8b" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.226:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.515442 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vt6dh" Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.575650 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vt6dh"] Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.633753 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.634012 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="cf28764b-4eae-446c-9140-aeff43c87d8b" containerName="nova-api-log" containerID="cri-o://73ccc965b38ad7d7e101b84b369ee89042545cbf2bf9384dd97b3735a7970fde" gracePeriod=30 Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.634536 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="cf28764b-4eae-446c-9140-aeff43c87d8b" containerName="nova-api-api" containerID="cri-o://f1db61e12803845e357899a950844c8e333b469d2b00dadb7d8fad216d89d8fa" gracePeriod=30 Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.650369 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.651803 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="8d0bae15-c439-4171-93c7-c652891d31fe" containerName="nova-scheduler-scheduler" containerID="cri-o://35abd03b520252fd61eeb9554fc3d7d5c8b96156abfaa82c217eab32ea06992c" gracePeriod=30 Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.696078 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.696432 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="be834203-f4c5-4c00-a37a-7f8193bc6047" containerName="nova-metadata-log" containerID="cri-o://67cfa83c4de8ce357c5f059859881b55af72cf69cd967de65cb1dfe2c0396644" gracePeriod=30 Dec 04 15:28:17 crc kubenswrapper[4946]: I1204 15:28:17.696514 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="be834203-f4c5-4c00-a37a-7f8193bc6047" containerName="nova-metadata-metadata" containerID="cri-o://d81eeac5fe2ac5de25ad8c82ced74845aa674d08f66014da3d3c5c1ae802b530" gracePeriod=30 Dec 04 15:28:18 crc kubenswrapper[4946]: I1204 15:28:18.465022 4946 generic.go:334] "Generic (PLEG): container finished" podID="be834203-f4c5-4c00-a37a-7f8193bc6047" 
containerID="67cfa83c4de8ce357c5f059859881b55af72cf69cd967de65cb1dfe2c0396644" exitCode=143 Dec 04 15:28:18 crc kubenswrapper[4946]: I1204 15:28:18.465195 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"be834203-f4c5-4c00-a37a-7f8193bc6047","Type":"ContainerDied","Data":"67cfa83c4de8ce357c5f059859881b55af72cf69cd967de65cb1dfe2c0396644"} Dec 04 15:28:18 crc kubenswrapper[4946]: I1204 15:28:18.467161 4946 generic.go:334] "Generic (PLEG): container finished" podID="cf28764b-4eae-446c-9140-aeff43c87d8b" containerID="73ccc965b38ad7d7e101b84b369ee89042545cbf2bf9384dd97b3735a7970fde" exitCode=143 Dec 04 15:28:18 crc kubenswrapper[4946]: I1204 15:28:18.467223 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cf28764b-4eae-446c-9140-aeff43c87d8b","Type":"ContainerDied","Data":"73ccc965b38ad7d7e101b84b369ee89042545cbf2bf9384dd97b3735a7970fde"} Dec 04 15:28:19 crc kubenswrapper[4946]: I1204 15:28:19.479874 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vt6dh" podUID="adda160a-dd48-4f0d-97fc-068f671ff3b4" containerName="registry-server" containerID="cri-o://007788b9913c0b64a99471a0b53537c075b82575ff22943222d2d6c0c190ef98" gracePeriod=2 Dec 04 15:28:19 crc kubenswrapper[4946]: E1204 15:28:19.750707 4946 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="35abd03b520252fd61eeb9554fc3d7d5c8b96156abfaa82c217eab32ea06992c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 04 15:28:19 crc kubenswrapper[4946]: E1204 15:28:19.756473 4946 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="35abd03b520252fd61eeb9554fc3d7d5c8b96156abfaa82c217eab32ea06992c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 04 15:28:19 crc kubenswrapper[4946]: E1204 15:28:19.758960 4946 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="35abd03b520252fd61eeb9554fc3d7d5c8b96156abfaa82c217eab32ea06992c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 04 15:28:19 crc kubenswrapper[4946]: E1204 15:28:19.759139 4946 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="8d0bae15-c439-4171-93c7-c652891d31fe" containerName="nova-scheduler-scheduler" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.090236 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vt6dh" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.173546 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adda160a-dd48-4f0d-97fc-068f671ff3b4-catalog-content\") pod \"adda160a-dd48-4f0d-97fc-068f671ff3b4\" (UID: \"adda160a-dd48-4f0d-97fc-068f671ff3b4\") " Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.173948 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adda160a-dd48-4f0d-97fc-068f671ff3b4-utilities\") pod \"adda160a-dd48-4f0d-97fc-068f671ff3b4\" (UID: \"adda160a-dd48-4f0d-97fc-068f671ff3b4\") " Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.174040 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99xdj\" (UniqueName: \"kubernetes.io/projected/adda160a-dd48-4f0d-97fc-068f671ff3b4-kube-api-access-99xdj\") pod \"adda160a-dd48-4f0d-97fc-068f671ff3b4\" (UID: \"adda160a-dd48-4f0d-97fc-068f671ff3b4\") " Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.175804 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/adda160a-dd48-4f0d-97fc-068f671ff3b4-utilities" (OuterVolumeSpecName: "utilities") pod "adda160a-dd48-4f0d-97fc-068f671ff3b4" (UID: "adda160a-dd48-4f0d-97fc-068f671ff3b4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.183424 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/adda160a-dd48-4f0d-97fc-068f671ff3b4-kube-api-access-99xdj" (OuterVolumeSpecName: "kube-api-access-99xdj") pod "adda160a-dd48-4f0d-97fc-068f671ff3b4" (UID: "adda160a-dd48-4f0d-97fc-068f671ff3b4"). InnerVolumeSpecName "kube-api-access-99xdj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.191424 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/adda160a-dd48-4f0d-97fc-068f671ff3b4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "adda160a-dd48-4f0d-97fc-068f671ff3b4" (UID: "adda160a-dd48-4f0d-97fc-068f671ff3b4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.277618 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99xdj\" (UniqueName: \"kubernetes.io/projected/adda160a-dd48-4f0d-97fc-068f671ff3b4-kube-api-access-99xdj\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.277687 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/adda160a-dd48-4f0d-97fc-068f671ff3b4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.277701 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/adda160a-dd48-4f0d-97fc-068f671ff3b4-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.493456 4946 generic.go:334] "Generic (PLEG): container finished" podID="adda160a-dd48-4f0d-97fc-068f671ff3b4" containerID="007788b9913c0b64a99471a0b53537c075b82575ff22943222d2d6c0c190ef98" exitCode=0 Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.493520 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vt6dh" event={"ID":"adda160a-dd48-4f0d-97fc-068f671ff3b4","Type":"ContainerDied","Data":"007788b9913c0b64a99471a0b53537c075b82575ff22943222d2d6c0c190ef98"} Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.493559 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vt6dh" event={"ID":"adda160a-dd48-4f0d-97fc-068f671ff3b4","Type":"ContainerDied","Data":"ca3a169753baba017217c78ba940ed0ef56f5090d4bd0ba2e01513fea2fc6262"} Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.493553 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vt6dh" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.493576 4946 scope.go:117] "RemoveContainer" containerID="007788b9913c0b64a99471a0b53537c075b82575ff22943222d2d6c0c190ef98" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.524225 4946 scope.go:117] "RemoveContainer" containerID="d18cc74a941f7178d0381437889f587da0fe59f20be180a5d1962cb224a82e8f" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.535616 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vt6dh"] Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.544844 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vt6dh"] Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.569370 4946 scope.go:117] "RemoveContainer" containerID="759908a9d8d9f08af330db6728f473a47c469d20746d5c472988ba9ab87e2cf5" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.616951 4946 scope.go:117] "RemoveContainer" containerID="007788b9913c0b64a99471a0b53537c075b82575ff22943222d2d6c0c190ef98" Dec 04 15:28:20 crc kubenswrapper[4946]: E1204 15:28:20.617520 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"007788b9913c0b64a99471a0b53537c075b82575ff22943222d2d6c0c190ef98\": container with ID starting with 007788b9913c0b64a99471a0b53537c075b82575ff22943222d2d6c0c190ef98 not found: ID does not exist" containerID="007788b9913c0b64a99471a0b53537c075b82575ff22943222d2d6c0c190ef98" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.617562 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"007788b9913c0b64a99471a0b53537c075b82575ff22943222d2d6c0c190ef98"} err="failed to get container status \"007788b9913c0b64a99471a0b53537c075b82575ff22943222d2d6c0c190ef98\": rpc error: code = NotFound desc = could not find container \"007788b9913c0b64a99471a0b53537c075b82575ff22943222d2d6c0c190ef98\": container with ID starting with 007788b9913c0b64a99471a0b53537c075b82575ff22943222d2d6c0c190ef98 not found: ID does not exist" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.617597 4946 scope.go:117] "RemoveContainer" containerID="d18cc74a941f7178d0381437889f587da0fe59f20be180a5d1962cb224a82e8f" Dec 04 15:28:20 crc kubenswrapper[4946]: E1204 15:28:20.618000 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d18cc74a941f7178d0381437889f587da0fe59f20be180a5d1962cb224a82e8f\": container with ID starting with d18cc74a941f7178d0381437889f587da0fe59f20be180a5d1962cb224a82e8f not found: ID does not exist" containerID="d18cc74a941f7178d0381437889f587da0fe59f20be180a5d1962cb224a82e8f" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.618071 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d18cc74a941f7178d0381437889f587da0fe59f20be180a5d1962cb224a82e8f"} err="failed to get container status \"d18cc74a941f7178d0381437889f587da0fe59f20be180a5d1962cb224a82e8f\": rpc error: code = NotFound desc = could not find container \"d18cc74a941f7178d0381437889f587da0fe59f20be180a5d1962cb224a82e8f\": container with ID starting with d18cc74a941f7178d0381437889f587da0fe59f20be180a5d1962cb224a82e8f not found: ID does not exist" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.618127 4946 scope.go:117] "RemoveContainer" 
containerID="759908a9d8d9f08af330db6728f473a47c469d20746d5c472988ba9ab87e2cf5" Dec 04 15:28:20 crc kubenswrapper[4946]: E1204 15:28:20.618458 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"759908a9d8d9f08af330db6728f473a47c469d20746d5c472988ba9ab87e2cf5\": container with ID starting with 759908a9d8d9f08af330db6728f473a47c469d20746d5c472988ba9ab87e2cf5 not found: ID does not exist" containerID="759908a9d8d9f08af330db6728f473a47c469d20746d5c472988ba9ab87e2cf5" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.618502 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"759908a9d8d9f08af330db6728f473a47c469d20746d5c472988ba9ab87e2cf5"} err="failed to get container status \"759908a9d8d9f08af330db6728f473a47c469d20746d5c472988ba9ab87e2cf5\": rpc error: code = NotFound desc = could not find container \"759908a9d8d9f08af330db6728f473a47c469d20746d5c472988ba9ab87e2cf5\": container with ID starting with 759908a9d8d9f08af330db6728f473a47c469d20746d5c472988ba9ab87e2cf5 not found: ID does not exist" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.868970 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="be834203-f4c5-4c00-a37a-7f8193bc6047" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.218:8775/\": read tcp 10.217.0.2:48420->10.217.0.218:8775: read: connection reset by peer" Dec 04 15:28:20 crc kubenswrapper[4946]: I1204 15:28:20.869039 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="be834203-f4c5-4c00-a37a-7f8193bc6047" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.218:8775/\": read tcp 10.217.0.2:48410->10.217.0.218:8775: read: connection reset by peer" Dec 04 15:28:21 crc kubenswrapper[4946]: I1204 15:28:21.099133 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 04 15:28:21 crc kubenswrapper[4946]: I1204 15:28:21.194921 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d0bae15-c439-4171-93c7-c652891d31fe-combined-ca-bundle\") pod \"8d0bae15-c439-4171-93c7-c652891d31fe\" (UID: \"8d0bae15-c439-4171-93c7-c652891d31fe\") " Dec 04 15:28:21 crc kubenswrapper[4946]: I1204 15:28:21.195211 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pg64t\" (UniqueName: \"kubernetes.io/projected/8d0bae15-c439-4171-93c7-c652891d31fe-kube-api-access-pg64t\") pod \"8d0bae15-c439-4171-93c7-c652891d31fe\" (UID: \"8d0bae15-c439-4171-93c7-c652891d31fe\") " Dec 04 15:28:21 crc kubenswrapper[4946]: I1204 15:28:21.195284 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d0bae15-c439-4171-93c7-c652891d31fe-config-data\") pod \"8d0bae15-c439-4171-93c7-c652891d31fe\" (UID: \"8d0bae15-c439-4171-93c7-c652891d31fe\") " Dec 04 15:28:21 crc kubenswrapper[4946]: I1204 15:28:21.202630 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d0bae15-c439-4171-93c7-c652891d31fe-kube-api-access-pg64t" (OuterVolumeSpecName: "kube-api-access-pg64t") pod "8d0bae15-c439-4171-93c7-c652891d31fe" (UID: "8d0bae15-c439-4171-93c7-c652891d31fe"). InnerVolumeSpecName "kube-api-access-pg64t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.236719 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d0bae15-c439-4171-93c7-c652891d31fe-config-data" (OuterVolumeSpecName: "config-data") pod "8d0bae15-c439-4171-93c7-c652891d31fe" (UID: "8d0bae15-c439-4171-93c7-c652891d31fe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.239267 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d0bae15-c439-4171-93c7-c652891d31fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d0bae15-c439-4171-93c7-c652891d31fe" (UID: "8d0bae15-c439-4171-93c7-c652891d31fe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.298364 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pg64t\" (UniqueName: \"kubernetes.io/projected/8d0bae15-c439-4171-93c7-c652891d31fe-kube-api-access-pg64t\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.298400 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d0bae15-c439-4171-93c7-c652891d31fe-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.298415 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d0bae15-c439-4171-93c7-c652891d31fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.316108 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.400421 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be834203-f4c5-4c00-a37a-7f8193bc6047-logs\") pod \"be834203-f4c5-4c00-a37a-7f8193bc6047\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.400715 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be834203-f4c5-4c00-a37a-7f8193bc6047-config-data\") pod \"be834203-f4c5-4c00-a37a-7f8193bc6047\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.400748 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be834203-f4c5-4c00-a37a-7f8193bc6047-combined-ca-bundle\") pod \"be834203-f4c5-4c00-a37a-7f8193bc6047\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.400790 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/be834203-f4c5-4c00-a37a-7f8193bc6047-nova-metadata-tls-certs\") pod \"be834203-f4c5-4c00-a37a-7f8193bc6047\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.400840 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sn2hp\" (UniqueName: \"kubernetes.io/projected/be834203-f4c5-4c00-a37a-7f8193bc6047-kube-api-access-sn2hp\") pod \"be834203-f4c5-4c00-a37a-7f8193bc6047\" (UID: \"be834203-f4c5-4c00-a37a-7f8193bc6047\") " Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.405588 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be834203-f4c5-4c00-a37a-7f8193bc6047-logs" (OuterVolumeSpecName: "logs") pod "be834203-f4c5-4c00-a37a-7f8193bc6047" (UID: "be834203-f4c5-4c00-a37a-7f8193bc6047"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.407510 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be834203-f4c5-4c00-a37a-7f8193bc6047-kube-api-access-sn2hp" (OuterVolumeSpecName: "kube-api-access-sn2hp") pod "be834203-f4c5-4c00-a37a-7f8193bc6047" (UID: "be834203-f4c5-4c00-a37a-7f8193bc6047"). InnerVolumeSpecName "kube-api-access-sn2hp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.435262 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be834203-f4c5-4c00-a37a-7f8193bc6047-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "be834203-f4c5-4c00-a37a-7f8193bc6047" (UID: "be834203-f4c5-4c00-a37a-7f8193bc6047"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.444256 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be834203-f4c5-4c00-a37a-7f8193bc6047-config-data" (OuterVolumeSpecName: "config-data") pod "be834203-f4c5-4c00-a37a-7f8193bc6047" (UID: "be834203-f4c5-4c00-a37a-7f8193bc6047"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.464418 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be834203-f4c5-4c00-a37a-7f8193bc6047-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "be834203-f4c5-4c00-a37a-7f8193bc6047" (UID: "be834203-f4c5-4c00-a37a-7f8193bc6047"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.468265 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="adda160a-dd48-4f0d-97fc-068f671ff3b4" path="/var/lib/kubelet/pods/adda160a-dd48-4f0d-97fc-068f671ff3b4/volumes" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.504523 4946 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be834203-f4c5-4c00-a37a-7f8193bc6047-logs\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.504550 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be834203-f4c5-4c00-a37a-7f8193bc6047-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.504562 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be834203-f4c5-4c00-a37a-7f8193bc6047-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.504573 4946 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/be834203-f4c5-4c00-a37a-7f8193bc6047-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.504584 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sn2hp\" (UniqueName: \"kubernetes.io/projected/be834203-f4c5-4c00-a37a-7f8193bc6047-kube-api-access-sn2hp\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.517016 4946 generic.go:334] "Generic (PLEG): container finished" podID="8d0bae15-c439-4171-93c7-c652891d31fe" containerID="35abd03b520252fd61eeb9554fc3d7d5c8b96156abfaa82c217eab32ea06992c" exitCode=0 Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.517094 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8d0bae15-c439-4171-93c7-c652891d31fe","Type":"ContainerDied","Data":"35abd03b520252fd61eeb9554fc3d7d5c8b96156abfaa82c217eab32ea06992c"} Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.517151 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8d0bae15-c439-4171-93c7-c652891d31fe","Type":"ContainerDied","Data":"5a4167d61728d84f9e71189844d4332639a6c5f9183a557112224467ef8af3aa"} Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.517178 4946 scope.go:117] "RemoveContainer" containerID="35abd03b520252fd61eeb9554fc3d7d5c8b96156abfaa82c217eab32ea06992c" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.517310 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.525642 4946 generic.go:334] "Generic (PLEG): container finished" podID="be834203-f4c5-4c00-a37a-7f8193bc6047" containerID="d81eeac5fe2ac5de25ad8c82ced74845aa674d08f66014da3d3c5c1ae802b530" exitCode=0 Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.525690 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"be834203-f4c5-4c00-a37a-7f8193bc6047","Type":"ContainerDied","Data":"d81eeac5fe2ac5de25ad8c82ced74845aa674d08f66014da3d3c5c1ae802b530"} Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.525718 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"be834203-f4c5-4c00-a37a-7f8193bc6047","Type":"ContainerDied","Data":"f26b9ce07fd0e9db5778f1a3aef7707d2dc7fb5241cb3a868e625d3e53abbecb"} Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.525781 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.555541 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.567790 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.570281 4946 scope.go:117] "RemoveContainer" containerID="35abd03b520252fd61eeb9554fc3d7d5c8b96156abfaa82c217eab32ea06992c" Dec 04 15:28:22 crc kubenswrapper[4946]: E1204 15:28:21.571723 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35abd03b520252fd61eeb9554fc3d7d5c8b96156abfaa82c217eab32ea06992c\": container with ID starting with 35abd03b520252fd61eeb9554fc3d7d5c8b96156abfaa82c217eab32ea06992c not found: ID does not exist" containerID="35abd03b520252fd61eeb9554fc3d7d5c8b96156abfaa82c217eab32ea06992c" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.571769 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35abd03b520252fd61eeb9554fc3d7d5c8b96156abfaa82c217eab32ea06992c"} err="failed to get container status \"35abd03b520252fd61eeb9554fc3d7d5c8b96156abfaa82c217eab32ea06992c\": rpc error: code = NotFound desc = could not find container \"35abd03b520252fd61eeb9554fc3d7d5c8b96156abfaa82c217eab32ea06992c\": container with ID starting with 35abd03b520252fd61eeb9554fc3d7d5c8b96156abfaa82c217eab32ea06992c not found: ID does not exist" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.571800 4946 scope.go:117] "RemoveContainer" containerID="d81eeac5fe2ac5de25ad8c82ced74845aa674d08f66014da3d3c5c1ae802b530" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.590623 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.601937 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.615297 4946 scope.go:117] "RemoveContainer" containerID="67cfa83c4de8ce357c5f059859881b55af72cf69cd967de65cb1dfe2c0396644" Dec 04 15:28:22 crc kubenswrapper[4946]: E1204 15:28:21.621532 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adda160a-dd48-4f0d-97fc-068f671ff3b4" containerName="extract-content" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 
15:28:21.621565 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="adda160a-dd48-4f0d-97fc-068f671ff3b4" containerName="extract-content" Dec 04 15:28:22 crc kubenswrapper[4946]: E1204 15:28:21.621593 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be834203-f4c5-4c00-a37a-7f8193bc6047" containerName="nova-metadata-metadata" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.621601 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="be834203-f4c5-4c00-a37a-7f8193bc6047" containerName="nova-metadata-metadata" Dec 04 15:28:22 crc kubenswrapper[4946]: E1204 15:28:21.621612 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="375837a2-49ae-4f0b-bc7c-a1ba198a8d14" containerName="init" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.621618 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="375837a2-49ae-4f0b-bc7c-a1ba198a8d14" containerName="init" Dec 04 15:28:22 crc kubenswrapper[4946]: E1204 15:28:21.621636 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d0bae15-c439-4171-93c7-c652891d31fe" containerName="nova-scheduler-scheduler" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.621642 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d0bae15-c439-4171-93c7-c652891d31fe" containerName="nova-scheduler-scheduler" Dec 04 15:28:22 crc kubenswrapper[4946]: E1204 15:28:21.621660 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be834203-f4c5-4c00-a37a-7f8193bc6047" containerName="nova-metadata-log" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.621666 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="be834203-f4c5-4c00-a37a-7f8193bc6047" containerName="nova-metadata-log" Dec 04 15:28:22 crc kubenswrapper[4946]: E1204 15:28:21.621673 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="680312de-5b5b-4622-8bf8-c987b2f87a05" containerName="nova-manage" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.621679 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="680312de-5b5b-4622-8bf8-c987b2f87a05" containerName="nova-manage" Dec 04 15:28:22 crc kubenswrapper[4946]: E1204 15:28:21.621698 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adda160a-dd48-4f0d-97fc-068f671ff3b4" containerName="extract-utilities" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.621704 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="adda160a-dd48-4f0d-97fc-068f671ff3b4" containerName="extract-utilities" Dec 04 15:28:22 crc kubenswrapper[4946]: E1204 15:28:21.621722 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="375837a2-49ae-4f0b-bc7c-a1ba198a8d14" containerName="dnsmasq-dns" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.621728 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="375837a2-49ae-4f0b-bc7c-a1ba198a8d14" containerName="dnsmasq-dns" Dec 04 15:28:22 crc kubenswrapper[4946]: E1204 15:28:21.621735 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adda160a-dd48-4f0d-97fc-068f671ff3b4" containerName="registry-server" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.621743 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="adda160a-dd48-4f0d-97fc-068f671ff3b4" containerName="registry-server" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.622034 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="be834203-f4c5-4c00-a37a-7f8193bc6047" containerName="nova-metadata-log" Dec 04 15:28:22 crc 
kubenswrapper[4946]: I1204 15:28:21.622053 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d0bae15-c439-4171-93c7-c652891d31fe" containerName="nova-scheduler-scheduler" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.622063 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="375837a2-49ae-4f0b-bc7c-a1ba198a8d14" containerName="dnsmasq-dns" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.622075 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="adda160a-dd48-4f0d-97fc-068f671ff3b4" containerName="registry-server" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.622082 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="be834203-f4c5-4c00-a37a-7f8193bc6047" containerName="nova-metadata-metadata" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.622099 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="680312de-5b5b-4622-8bf8-c987b2f87a05" containerName="nova-manage" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.622843 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.622865 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.622947 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.639036 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.667905 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.669827 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.673236 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.673479 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.717134 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.724467 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c624054-0759-47af-af3e-4600907ab8b8-logs\") pod \"nova-metadata-0\" (UID: \"9c624054-0759-47af-af3e-4600907ab8b8\") " pod="openstack/nova-metadata-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.724514 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2xrt\" (UniqueName: \"kubernetes.io/projected/9c624054-0759-47af-af3e-4600907ab8b8-kube-api-access-b2xrt\") pod \"nova-metadata-0\" (UID: \"9c624054-0759-47af-af3e-4600907ab8b8\") " pod="openstack/nova-metadata-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.724581 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29ba8816-fd2e-4a8d-bbcf-d2178110c7eb-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"29ba8816-fd2e-4a8d-bbcf-d2178110c7eb\") " pod="openstack/nova-scheduler-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.724599 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29ba8816-fd2e-4a8d-bbcf-d2178110c7eb-config-data\") pod \"nova-scheduler-0\" (UID: \"29ba8816-fd2e-4a8d-bbcf-d2178110c7eb\") " pod="openstack/nova-scheduler-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.724639 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c624054-0759-47af-af3e-4600907ab8b8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9c624054-0759-47af-af3e-4600907ab8b8\") " pod="openstack/nova-metadata-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.724658 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c624054-0759-47af-af3e-4600907ab8b8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"9c624054-0759-47af-af3e-4600907ab8b8\") " pod="openstack/nova-metadata-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.724707 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhhgv\" (UniqueName: \"kubernetes.io/projected/29ba8816-fd2e-4a8d-bbcf-d2178110c7eb-kube-api-access-dhhgv\") pod \"nova-scheduler-0\" (UID: \"29ba8816-fd2e-4a8d-bbcf-d2178110c7eb\") " pod="openstack/nova-scheduler-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.724745 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c624054-0759-47af-af3e-4600907ab8b8-config-data\") pod \"nova-metadata-0\" (UID: 
\"9c624054-0759-47af-af3e-4600907ab8b8\") " pod="openstack/nova-metadata-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.735990 4946 scope.go:117] "RemoveContainer" containerID="d81eeac5fe2ac5de25ad8c82ced74845aa674d08f66014da3d3c5c1ae802b530" Dec 04 15:28:22 crc kubenswrapper[4946]: E1204 15:28:21.736524 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d81eeac5fe2ac5de25ad8c82ced74845aa674d08f66014da3d3c5c1ae802b530\": container with ID starting with d81eeac5fe2ac5de25ad8c82ced74845aa674d08f66014da3d3c5c1ae802b530 not found: ID does not exist" containerID="d81eeac5fe2ac5de25ad8c82ced74845aa674d08f66014da3d3c5c1ae802b530" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.736581 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d81eeac5fe2ac5de25ad8c82ced74845aa674d08f66014da3d3c5c1ae802b530"} err="failed to get container status \"d81eeac5fe2ac5de25ad8c82ced74845aa674d08f66014da3d3c5c1ae802b530\": rpc error: code = NotFound desc = could not find container \"d81eeac5fe2ac5de25ad8c82ced74845aa674d08f66014da3d3c5c1ae802b530\": container with ID starting with d81eeac5fe2ac5de25ad8c82ced74845aa674d08f66014da3d3c5c1ae802b530 not found: ID does not exist" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.736611 4946 scope.go:117] "RemoveContainer" containerID="67cfa83c4de8ce357c5f059859881b55af72cf69cd967de65cb1dfe2c0396644" Dec 04 15:28:22 crc kubenswrapper[4946]: E1204 15:28:21.737223 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67cfa83c4de8ce357c5f059859881b55af72cf69cd967de65cb1dfe2c0396644\": container with ID starting with 67cfa83c4de8ce357c5f059859881b55af72cf69cd967de65cb1dfe2c0396644 not found: ID does not exist" containerID="67cfa83c4de8ce357c5f059859881b55af72cf69cd967de65cb1dfe2c0396644" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.737278 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67cfa83c4de8ce357c5f059859881b55af72cf69cd967de65cb1dfe2c0396644"} err="failed to get container status \"67cfa83c4de8ce357c5f059859881b55af72cf69cd967de65cb1dfe2c0396644\": rpc error: code = NotFound desc = could not find container \"67cfa83c4de8ce357c5f059859881b55af72cf69cd967de65cb1dfe2c0396644\": container with ID starting with 67cfa83c4de8ce357c5f059859881b55af72cf69cd967de65cb1dfe2c0396644 not found: ID does not exist" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.826519 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2xrt\" (UniqueName: \"kubernetes.io/projected/9c624054-0759-47af-af3e-4600907ab8b8-kube-api-access-b2xrt\") pod \"nova-metadata-0\" (UID: \"9c624054-0759-47af-af3e-4600907ab8b8\") " pod="openstack/nova-metadata-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.826642 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29ba8816-fd2e-4a8d-bbcf-d2178110c7eb-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"29ba8816-fd2e-4a8d-bbcf-d2178110c7eb\") " pod="openstack/nova-scheduler-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.826674 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29ba8816-fd2e-4a8d-bbcf-d2178110c7eb-config-data\") pod 
\"nova-scheduler-0\" (UID: \"29ba8816-fd2e-4a8d-bbcf-d2178110c7eb\") " pod="openstack/nova-scheduler-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.826777 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c624054-0759-47af-af3e-4600907ab8b8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9c624054-0759-47af-af3e-4600907ab8b8\") " pod="openstack/nova-metadata-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.826810 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c624054-0759-47af-af3e-4600907ab8b8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"9c624054-0759-47af-af3e-4600907ab8b8\") " pod="openstack/nova-metadata-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.826900 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhhgv\" (UniqueName: \"kubernetes.io/projected/29ba8816-fd2e-4a8d-bbcf-d2178110c7eb-kube-api-access-dhhgv\") pod \"nova-scheduler-0\" (UID: \"29ba8816-fd2e-4a8d-bbcf-d2178110c7eb\") " pod="openstack/nova-scheduler-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.826955 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c624054-0759-47af-af3e-4600907ab8b8-config-data\") pod \"nova-metadata-0\" (UID: \"9c624054-0759-47af-af3e-4600907ab8b8\") " pod="openstack/nova-metadata-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.826994 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c624054-0759-47af-af3e-4600907ab8b8-logs\") pod \"nova-metadata-0\" (UID: \"9c624054-0759-47af-af3e-4600907ab8b8\") " pod="openstack/nova-metadata-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.827489 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c624054-0759-47af-af3e-4600907ab8b8-logs\") pod \"nova-metadata-0\" (UID: \"9c624054-0759-47af-af3e-4600907ab8b8\") " pod="openstack/nova-metadata-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.832413 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c624054-0759-47af-af3e-4600907ab8b8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"9c624054-0759-47af-af3e-4600907ab8b8\") " pod="openstack/nova-metadata-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.832496 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c624054-0759-47af-af3e-4600907ab8b8-config-data\") pod \"nova-metadata-0\" (UID: \"9c624054-0759-47af-af3e-4600907ab8b8\") " pod="openstack/nova-metadata-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.832752 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29ba8816-fd2e-4a8d-bbcf-d2178110c7eb-config-data\") pod \"nova-scheduler-0\" (UID: \"29ba8816-fd2e-4a8d-bbcf-d2178110c7eb\") " pod="openstack/nova-scheduler-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.834714 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9c624054-0759-47af-af3e-4600907ab8b8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9c624054-0759-47af-af3e-4600907ab8b8\") " pod="openstack/nova-metadata-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.847822 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29ba8816-fd2e-4a8d-bbcf-d2178110c7eb-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"29ba8816-fd2e-4a8d-bbcf-d2178110c7eb\") " pod="openstack/nova-scheduler-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.853304 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2xrt\" (UniqueName: \"kubernetes.io/projected/9c624054-0759-47af-af3e-4600907ab8b8-kube-api-access-b2xrt\") pod \"nova-metadata-0\" (UID: \"9c624054-0759-47af-af3e-4600907ab8b8\") " pod="openstack/nova-metadata-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:21.855464 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhhgv\" (UniqueName: \"kubernetes.io/projected/29ba8816-fd2e-4a8d-bbcf-d2178110c7eb-kube-api-access-dhhgv\") pod \"nova-scheduler-0\" (UID: \"29ba8816-fd2e-4a8d-bbcf-d2178110c7eb\") " pod="openstack/nova-scheduler-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:22.008277 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 04 15:28:22 crc kubenswrapper[4946]: I1204 15:28:22.021995 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.404065 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.429017 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.473659 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d0bae15-c439-4171-93c7-c652891d31fe" path="/var/lib/kubelet/pods/8d0bae15-c439-4171-93c7-c652891d31fe/volumes" Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.474394 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be834203-f4c5-4c00-a37a-7f8193bc6047" path="/var/lib/kubelet/pods/be834203-f4c5-4c00-a37a-7f8193bc6047/volumes" Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.553376 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"29ba8816-fd2e-4a8d-bbcf-d2178110c7eb","Type":"ContainerStarted","Data":"78ba856d0e686b80e07ca67d2ef4d32a14181449810e4617901bc1436ef14183"} Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.557792 4946 generic.go:334] "Generic (PLEG): container finished" podID="cf28764b-4eae-446c-9140-aeff43c87d8b" containerID="f1db61e12803845e357899a950844c8e333b469d2b00dadb7d8fad216d89d8fa" exitCode=0 Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.557865 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cf28764b-4eae-446c-9140-aeff43c87d8b","Type":"ContainerDied","Data":"f1db61e12803845e357899a950844c8e333b469d2b00dadb7d8fad216d89d8fa"} Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.560339 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"9c624054-0759-47af-af3e-4600907ab8b8","Type":"ContainerStarted","Data":"dea9327d6f92eaa38896340e2aa64afc86939ffff617279e5375510a1a639365"} Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.823179 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.883523 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-public-tls-certs\") pod \"cf28764b-4eae-446c-9140-aeff43c87d8b\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.883679 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-combined-ca-bundle\") pod \"cf28764b-4eae-446c-9140-aeff43c87d8b\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.883743 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-config-data\") pod \"cf28764b-4eae-446c-9140-aeff43c87d8b\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.883789 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf28764b-4eae-446c-9140-aeff43c87d8b-logs\") pod \"cf28764b-4eae-446c-9140-aeff43c87d8b\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.883812 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-internal-tls-certs\") pod \"cf28764b-4eae-446c-9140-aeff43c87d8b\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.883940 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbvh9\" (UniqueName: \"kubernetes.io/projected/cf28764b-4eae-446c-9140-aeff43c87d8b-kube-api-access-pbvh9\") pod \"cf28764b-4eae-446c-9140-aeff43c87d8b\" (UID: \"cf28764b-4eae-446c-9140-aeff43c87d8b\") " Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.894015 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf28764b-4eae-446c-9140-aeff43c87d8b-logs" (OuterVolumeSpecName: "logs") pod "cf28764b-4eae-446c-9140-aeff43c87d8b" (UID: "cf28764b-4eae-446c-9140-aeff43c87d8b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.895920 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf28764b-4eae-446c-9140-aeff43c87d8b-kube-api-access-pbvh9" (OuterVolumeSpecName: "kube-api-access-pbvh9") pod "cf28764b-4eae-446c-9140-aeff43c87d8b" (UID: "cf28764b-4eae-446c-9140-aeff43c87d8b"). InnerVolumeSpecName "kube-api-access-pbvh9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.968022 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf28764b-4eae-446c-9140-aeff43c87d8b" (UID: "cf28764b-4eae-446c-9140-aeff43c87d8b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.988253 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbvh9\" (UniqueName: \"kubernetes.io/projected/cf28764b-4eae-446c-9140-aeff43c87d8b-kube-api-access-pbvh9\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.988285 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:23 crc kubenswrapper[4946]: I1204 15:28:23.988295 4946 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf28764b-4eae-446c-9140-aeff43c87d8b-logs\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.002438 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-config-data" (OuterVolumeSpecName: "config-data") pod "cf28764b-4eae-446c-9140-aeff43c87d8b" (UID: "cf28764b-4eae-446c-9140-aeff43c87d8b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.024793 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "cf28764b-4eae-446c-9140-aeff43c87d8b" (UID: "cf28764b-4eae-446c-9140-aeff43c87d8b"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.033458 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "cf28764b-4eae-446c-9140-aeff43c87d8b" (UID: "cf28764b-4eae-446c-9140-aeff43c87d8b"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.090961 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.090999 4946 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.091010 4946 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf28764b-4eae-446c-9140-aeff43c87d8b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.580076 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.580741 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cf28764b-4eae-446c-9140-aeff43c87d8b","Type":"ContainerDied","Data":"1ee6c06175edcb349b75c1ccd2035b6ced7e2e303925e6ecceb575a40d73e29f"} Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.580832 4946 scope.go:117] "RemoveContainer" containerID="f1db61e12803845e357899a950844c8e333b469d2b00dadb7d8fad216d89d8fa" Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.591929 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9c624054-0759-47af-af3e-4600907ab8b8","Type":"ContainerStarted","Data":"b1a38df1fcf23dd7d50cab14802bc04ab397a0ee8a829a00cbb6ebf03c76b2ad"} Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.591980 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9c624054-0759-47af-af3e-4600907ab8b8","Type":"ContainerStarted","Data":"5e753c8d949f31c754d69c2d913c26e38b5ea7dbf5aa0a5279ef432a6f9f02b5"} Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.593865 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"29ba8816-fd2e-4a8d-bbcf-d2178110c7eb","Type":"ContainerStarted","Data":"02955efd2b5812d037c8b74b4d0b67b390898a1b31f7cf4f0c3a6dbd766c9d00"} Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.625708 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.625690164 podStartE2EDuration="3.625690164s" podCreationTimestamp="2025-12-04 15:28:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:28:24.620996438 +0000 UTC m=+1555.507040079" watchObservedRunningTime="2025-12-04 15:28:24.625690164 +0000 UTC m=+1555.511733805" Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.635864 4946 scope.go:117] "RemoveContainer" containerID="73ccc965b38ad7d7e101b84b369ee89042545cbf2bf9384dd97b3735a7970fde" Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.664542 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.687699 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.693161 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.693109961 podStartE2EDuration="3.693109961s" podCreationTimestamp="2025-12-04 15:28:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:28:24.665668212 +0000 UTC m=+1555.551711873" watchObservedRunningTime="2025-12-04 15:28:24.693109961 +0000 UTC m=+1555.579153622" Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.730046 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 04 15:28:24 crc kubenswrapper[4946]: E1204 15:28:24.730920 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf28764b-4eae-446c-9140-aeff43c87d8b" containerName="nova-api-api" Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.730933 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf28764b-4eae-446c-9140-aeff43c87d8b" containerName="nova-api-api" 
Dec 04 15:28:24 crc kubenswrapper[4946]: E1204 15:28:24.730966 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf28764b-4eae-446c-9140-aeff43c87d8b" containerName="nova-api-log"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.730973 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf28764b-4eae-446c-9140-aeff43c87d8b" containerName="nova-api-log"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.731183 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf28764b-4eae-446c-9140-aeff43c87d8b" containerName="nova-api-log"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.731198 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf28764b-4eae-446c-9140-aeff43c87d8b" containerName="nova-api-api"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.732366 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.739928 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.745703 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.745751 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.745902 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.808808 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/580cdc0a-af87-4eac-8b8e-79d451eb312c-public-tls-certs\") pod \"nova-api-0\" (UID: \"580cdc0a-af87-4eac-8b8e-79d451eb312c\") " pod="openstack/nova-api-0"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.808895 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65s6w\" (UniqueName: \"kubernetes.io/projected/580cdc0a-af87-4eac-8b8e-79d451eb312c-kube-api-access-65s6w\") pod \"nova-api-0\" (UID: \"580cdc0a-af87-4eac-8b8e-79d451eb312c\") " pod="openstack/nova-api-0"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.808917 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/580cdc0a-af87-4eac-8b8e-79d451eb312c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"580cdc0a-af87-4eac-8b8e-79d451eb312c\") " pod="openstack/nova-api-0"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.808994 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/580cdc0a-af87-4eac-8b8e-79d451eb312c-config-data\") pod \"nova-api-0\" (UID: \"580cdc0a-af87-4eac-8b8e-79d451eb312c\") " pod="openstack/nova-api-0"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.809367 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/580cdc0a-af87-4eac-8b8e-79d451eb312c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"580cdc0a-af87-4eac-8b8e-79d451eb312c\") " pod="openstack/nova-api-0"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.809515 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/580cdc0a-af87-4eac-8b8e-79d451eb312c-logs\") pod \"nova-api-0\" (UID: \"580cdc0a-af87-4eac-8b8e-79d451eb312c\") " pod="openstack/nova-api-0"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.912480 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/580cdc0a-af87-4eac-8b8e-79d451eb312c-config-data\") pod \"nova-api-0\" (UID: \"580cdc0a-af87-4eac-8b8e-79d451eb312c\") " pod="openstack/nova-api-0"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.912629 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/580cdc0a-af87-4eac-8b8e-79d451eb312c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"580cdc0a-af87-4eac-8b8e-79d451eb312c\") " pod="openstack/nova-api-0"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.912690 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/580cdc0a-af87-4eac-8b8e-79d451eb312c-logs\") pod \"nova-api-0\" (UID: \"580cdc0a-af87-4eac-8b8e-79d451eb312c\") " pod="openstack/nova-api-0"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.912833 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/580cdc0a-af87-4eac-8b8e-79d451eb312c-public-tls-certs\") pod \"nova-api-0\" (UID: \"580cdc0a-af87-4eac-8b8e-79d451eb312c\") " pod="openstack/nova-api-0"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.912932 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65s6w\" (UniqueName: \"kubernetes.io/projected/580cdc0a-af87-4eac-8b8e-79d451eb312c-kube-api-access-65s6w\") pod \"nova-api-0\" (UID: \"580cdc0a-af87-4eac-8b8e-79d451eb312c\") " pod="openstack/nova-api-0"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.912978 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/580cdc0a-af87-4eac-8b8e-79d451eb312c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"580cdc0a-af87-4eac-8b8e-79d451eb312c\") " pod="openstack/nova-api-0"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.913429 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/580cdc0a-af87-4eac-8b8e-79d451eb312c-logs\") pod \"nova-api-0\" (UID: \"580cdc0a-af87-4eac-8b8e-79d451eb312c\") " pod="openstack/nova-api-0"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.920285 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/580cdc0a-af87-4eac-8b8e-79d451eb312c-public-tls-certs\") pod \"nova-api-0\" (UID: \"580cdc0a-af87-4eac-8b8e-79d451eb312c\") " pod="openstack/nova-api-0"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.920878 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/580cdc0a-af87-4eac-8b8e-79d451eb312c-config-data\") pod \"nova-api-0\" (UID: \"580cdc0a-af87-4eac-8b8e-79d451eb312c\") " pod="openstack/nova-api-0"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.921716 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/580cdc0a-af87-4eac-8b8e-79d451eb312c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"580cdc0a-af87-4eac-8b8e-79d451eb312c\") " pod="openstack/nova-api-0"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.937438 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65s6w\" (UniqueName: \"kubernetes.io/projected/580cdc0a-af87-4eac-8b8e-79d451eb312c-kube-api-access-65s6w\") pod \"nova-api-0\" (UID: \"580cdc0a-af87-4eac-8b8e-79d451eb312c\") " pod="openstack/nova-api-0"
Dec 04 15:28:24 crc kubenswrapper[4946]: I1204 15:28:24.937796 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/580cdc0a-af87-4eac-8b8e-79d451eb312c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"580cdc0a-af87-4eac-8b8e-79d451eb312c\") " pod="openstack/nova-api-0"
Dec 04 15:28:25 crc kubenswrapper[4946]: I1204 15:28:25.067107 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 04 15:28:25 crc kubenswrapper[4946]: I1204 15:28:25.474564 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf28764b-4eae-446c-9140-aeff43c87d8b" path="/var/lib/kubelet/pods/cf28764b-4eae-446c-9140-aeff43c87d8b/volumes"
Dec 04 15:28:25 crc kubenswrapper[4946]: I1204 15:28:25.629157 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 04 15:28:26 crc kubenswrapper[4946]: I1204 15:28:26.668950 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"580cdc0a-af87-4eac-8b8e-79d451eb312c","Type":"ContainerStarted","Data":"a8da93f34b9c03ac01bd079629a20c043fdd0c80136f0e07f1d03f698414f805"}
Dec 04 15:28:26 crc kubenswrapper[4946]: I1204 15:28:26.671328 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"580cdc0a-af87-4eac-8b8e-79d451eb312c","Type":"ContainerStarted","Data":"e0532233590bcb039437eae3e774de3b59141f7860d061d5e04b4268d1b58d58"}
Dec 04 15:28:26 crc kubenswrapper[4946]: I1204 15:28:26.671450 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"580cdc0a-af87-4eac-8b8e-79d451eb312c","Type":"ContainerStarted","Data":"a8f8f8ee96ddb9880e33da3d779c37839db5b4df973d6365bb8e0b8e2ed547cc"}
Dec 04 15:28:26 crc kubenswrapper[4946]: I1204 15:28:26.701415 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.70138054 podStartE2EDuration="2.70138054s" podCreationTimestamp="2025-12-04 15:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:28:26.697751772 +0000 UTC m=+1557.583795453" watchObservedRunningTime="2025-12-04 15:28:26.70138054 +0000 UTC m=+1557.587424201"
Dec 04 15:28:27 crc kubenswrapper[4946]: I1204 15:28:27.009652 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 04 15:28:27 crc kubenswrapper[4946]: I1204 15:28:27.022147 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 04 15:28:27 crc kubenswrapper[4946]: I1204 15:28:27.022224 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 04 15:28:32 crc kubenswrapper[4946]: I1204 15:28:32.009932 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 04 15:28:32 crc kubenswrapper[4946]: I1204 15:28:32.022135 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 04 15:28:32 crc kubenswrapper[4946]: I1204 15:28:32.022180 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 04 15:28:32 crc kubenswrapper[4946]: I1204 15:28:32.044619 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 04 15:28:32 crc kubenswrapper[4946]: I1204 15:28:32.813243 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 04 15:28:33 crc kubenswrapper[4946]: I1204 15:28:33.034363 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="9c624054-0759-47af-af3e-4600907ab8b8" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.230:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 04 15:28:33 crc kubenswrapper[4946]: I1204 15:28:33.034501 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="9c624054-0759-47af-af3e-4600907ab8b8" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.230:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 04 15:28:35 crc kubenswrapper[4946]: I1204 15:28:35.068410 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 04 15:28:35 crc kubenswrapper[4946]: I1204 15:28:35.068870 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 04 15:28:35 crc kubenswrapper[4946]: I1204 15:28:35.432354 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Dec 04 15:28:36 crc kubenswrapper[4946]: I1204 15:28:36.090422 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="580cdc0a-af87-4eac-8b8e-79d451eb312c" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.231:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 04 15:28:36 crc kubenswrapper[4946]: I1204 15:28:36.090422 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="580cdc0a-af87-4eac-8b8e-79d451eb312c" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.231:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 04 15:28:37 crc kubenswrapper[4946]: I1204 15:28:37.772126 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qjfsf"]
Dec 04 15:28:37 crc kubenswrapper[4946]: I1204 15:28:37.775844 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qjfsf" Dec 04 15:28:37 crc kubenswrapper[4946]: I1204 15:28:37.784864 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qjfsf"] Dec 04 15:28:37 crc kubenswrapper[4946]: I1204 15:28:37.942126 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51d0224f-2d30-47b7-91fd-78e66a782ee9-catalog-content\") pod \"certified-operators-qjfsf\" (UID: \"51d0224f-2d30-47b7-91fd-78e66a782ee9\") " pod="openshift-marketplace/certified-operators-qjfsf" Dec 04 15:28:37 crc kubenswrapper[4946]: I1204 15:28:37.942203 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51d0224f-2d30-47b7-91fd-78e66a782ee9-utilities\") pod \"certified-operators-qjfsf\" (UID: \"51d0224f-2d30-47b7-91fd-78e66a782ee9\") " pod="openshift-marketplace/certified-operators-qjfsf" Dec 04 15:28:37 crc kubenswrapper[4946]: I1204 15:28:37.942235 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxrrz\" (UniqueName: \"kubernetes.io/projected/51d0224f-2d30-47b7-91fd-78e66a782ee9-kube-api-access-zxrrz\") pod \"certified-operators-qjfsf\" (UID: \"51d0224f-2d30-47b7-91fd-78e66a782ee9\") " pod="openshift-marketplace/certified-operators-qjfsf" Dec 04 15:28:38 crc kubenswrapper[4946]: I1204 15:28:38.044435 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51d0224f-2d30-47b7-91fd-78e66a782ee9-catalog-content\") pod \"certified-operators-qjfsf\" (UID: \"51d0224f-2d30-47b7-91fd-78e66a782ee9\") " pod="openshift-marketplace/certified-operators-qjfsf" Dec 04 15:28:38 crc kubenswrapper[4946]: I1204 15:28:38.044482 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51d0224f-2d30-47b7-91fd-78e66a782ee9-utilities\") pod \"certified-operators-qjfsf\" (UID: \"51d0224f-2d30-47b7-91fd-78e66a782ee9\") " pod="openshift-marketplace/certified-operators-qjfsf" Dec 04 15:28:38 crc kubenswrapper[4946]: I1204 15:28:38.044518 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxrrz\" (UniqueName: \"kubernetes.io/projected/51d0224f-2d30-47b7-91fd-78e66a782ee9-kube-api-access-zxrrz\") pod \"certified-operators-qjfsf\" (UID: \"51d0224f-2d30-47b7-91fd-78e66a782ee9\") " pod="openshift-marketplace/certified-operators-qjfsf" Dec 04 15:28:38 crc kubenswrapper[4946]: I1204 15:28:38.045147 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51d0224f-2d30-47b7-91fd-78e66a782ee9-catalog-content\") pod \"certified-operators-qjfsf\" (UID: \"51d0224f-2d30-47b7-91fd-78e66a782ee9\") " pod="openshift-marketplace/certified-operators-qjfsf" Dec 04 15:28:38 crc kubenswrapper[4946]: I1204 15:28:38.045237 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51d0224f-2d30-47b7-91fd-78e66a782ee9-utilities\") pod \"certified-operators-qjfsf\" (UID: \"51d0224f-2d30-47b7-91fd-78e66a782ee9\") " pod="openshift-marketplace/certified-operators-qjfsf" Dec 04 15:28:38 crc kubenswrapper[4946]: I1204 15:28:38.065622 4946 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zxrrz\" (UniqueName: \"kubernetes.io/projected/51d0224f-2d30-47b7-91fd-78e66a782ee9-kube-api-access-zxrrz\") pod \"certified-operators-qjfsf\" (UID: \"51d0224f-2d30-47b7-91fd-78e66a782ee9\") " pod="openshift-marketplace/certified-operators-qjfsf" Dec 04 15:28:38 crc kubenswrapper[4946]: I1204 15:28:38.105004 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qjfsf" Dec 04 15:28:38 crc kubenswrapper[4946]: I1204 15:28:38.662200 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qjfsf"] Dec 04 15:28:38 crc kubenswrapper[4946]: I1204 15:28:38.841001 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qjfsf" event={"ID":"51d0224f-2d30-47b7-91fd-78e66a782ee9","Type":"ContainerStarted","Data":"dc7f1984c65afa658cce80a6d5d32f0fcd26f65003f98b30daa91b5722c1313d"} Dec 04 15:28:39 crc kubenswrapper[4946]: I1204 15:28:39.855705 4946 generic.go:334] "Generic (PLEG): container finished" podID="51d0224f-2d30-47b7-91fd-78e66a782ee9" containerID="007cfa3c1c272ba146a2815368d2a8ed5c0405c48cb70f339240e9b7be78193a" exitCode=0 Dec 04 15:28:39 crc kubenswrapper[4946]: I1204 15:28:39.855766 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qjfsf" event={"ID":"51d0224f-2d30-47b7-91fd-78e66a782ee9","Type":"ContainerDied","Data":"007cfa3c1c272ba146a2815368d2a8ed5c0405c48cb70f339240e9b7be78193a"} Dec 04 15:28:40 crc kubenswrapper[4946]: I1204 15:28:40.373170 4946 scope.go:117] "RemoveContainer" containerID="642b11eea5294f96a8b756fe01aa50551e6e58ed71e4a96c3cca3ba0dd71f5f0" Dec 04 15:28:41 crc kubenswrapper[4946]: I1204 15:28:41.244407 4946 scope.go:117] "RemoveContainer" containerID="2dc6e4c7bb421bce35cf4b39ec7a020b3055a60fe92ce33d202140b49b1c7cf1" Dec 04 15:28:42 crc kubenswrapper[4946]: I1204 15:28:42.029953 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 04 15:28:42 crc kubenswrapper[4946]: I1204 15:28:42.030572 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 04 15:28:42 crc kubenswrapper[4946]: I1204 15:28:42.035383 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 04 15:28:42 crc kubenswrapper[4946]: I1204 15:28:42.035821 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 04 15:28:42 crc kubenswrapper[4946]: I1204 15:28:42.892089 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qjfsf" event={"ID":"51d0224f-2d30-47b7-91fd-78e66a782ee9","Type":"ContainerStarted","Data":"57c482431710f40cf01c0bf01912e30690faa5836f32054ac7e94c9725a4e9e7"} Dec 04 15:28:44 crc kubenswrapper[4946]: I1204 15:28:44.923805 4946 generic.go:334] "Generic (PLEG): container finished" podID="51d0224f-2d30-47b7-91fd-78e66a782ee9" containerID="57c482431710f40cf01c0bf01912e30690faa5836f32054ac7e94c9725a4e9e7" exitCode=0 Dec 04 15:28:44 crc kubenswrapper[4946]: I1204 15:28:44.924311 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qjfsf" event={"ID":"51d0224f-2d30-47b7-91fd-78e66a782ee9","Type":"ContainerDied","Data":"57c482431710f40cf01c0bf01912e30690faa5836f32054ac7e94c9725a4e9e7"} Dec 04 15:28:45 crc kubenswrapper[4946]: I1204 
15:28:45.076484 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 04 15:28:45 crc kubenswrapper[4946]: I1204 15:28:45.077181 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 04 15:28:45 crc kubenswrapper[4946]: I1204 15:28:45.077628 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 04 15:28:45 crc kubenswrapper[4946]: I1204 15:28:45.077701 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 04 15:28:45 crc kubenswrapper[4946]: I1204 15:28:45.083704 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 04 15:28:45 crc kubenswrapper[4946]: I1204 15:28:45.086031 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 04 15:28:46 crc kubenswrapper[4946]: I1204 15:28:46.952924 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qjfsf" event={"ID":"51d0224f-2d30-47b7-91fd-78e66a782ee9","Type":"ContainerStarted","Data":"7cb49a1eed2ce48c3001bc46bc73c43c1bce6a8fafc99ca76b859bc769472935"} Dec 04 15:28:46 crc kubenswrapper[4946]: I1204 15:28:46.979934 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qjfsf" podStartSLOduration=3.408039274 podStartE2EDuration="9.979913451s" podCreationTimestamp="2025-12-04 15:28:37 +0000 UTC" firstStartedPulling="2025-12-04 15:28:39.858495894 +0000 UTC m=+1570.744539555" lastFinishedPulling="2025-12-04 15:28:46.430370091 +0000 UTC m=+1577.316413732" observedRunningTime="2025-12-04 15:28:46.974619258 +0000 UTC m=+1577.860662929" watchObservedRunningTime="2025-12-04 15:28:46.979913451 +0000 UTC m=+1577.865957082" Dec 04 15:28:48 crc kubenswrapper[4946]: I1204 15:28:48.106089 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qjfsf" Dec 04 15:28:48 crc kubenswrapper[4946]: I1204 15:28:48.106553 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qjfsf" Dec 04 15:28:49 crc kubenswrapper[4946]: I1204 15:28:49.162927 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-qjfsf" podUID="51d0224f-2d30-47b7-91fd-78e66a782ee9" containerName="registry-server" probeResult="failure" output=< Dec 04 15:28:49 crc kubenswrapper[4946]: timeout: failed to connect service ":50051" within 1s Dec 04 15:28:49 crc kubenswrapper[4946]: > Dec 04 15:28:52 crc kubenswrapper[4946]: I1204 15:28:52.478500 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:28:52 crc kubenswrapper[4946]: I1204 15:28:52.481330 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.395280 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/cloudkitty-db-sync-w5njq"] Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.408215 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-sync-w5njq"] Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.466804 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6df584d-65d7-4829-8937-3ac0ab49b71b" path="/var/lib/kubelet/pods/c6df584d-65d7-4829-8937-3ac0ab49b71b/volumes" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.485682 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-sync-jkwmj"] Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.487729 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.491678 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.511452 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-jkwmj"] Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.618853 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95ee854c-ccd7-4292-b874-9ce160fc8988-combined-ca-bundle\") pod \"cloudkitty-db-sync-jkwmj\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.618985 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95ee854c-ccd7-4292-b874-9ce160fc8988-scripts\") pod \"cloudkitty-db-sync-jkwmj\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.619053 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/95ee854c-ccd7-4292-b874-9ce160fc8988-certs\") pod \"cloudkitty-db-sync-jkwmj\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.619086 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8cd4\" (UniqueName: \"kubernetes.io/projected/95ee854c-ccd7-4292-b874-9ce160fc8988-kube-api-access-l8cd4\") pod \"cloudkitty-db-sync-jkwmj\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.619176 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95ee854c-ccd7-4292-b874-9ce160fc8988-config-data\") pod \"cloudkitty-db-sync-jkwmj\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.721721 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95ee854c-ccd7-4292-b874-9ce160fc8988-scripts\") pod \"cloudkitty-db-sync-jkwmj\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.723027 4946 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/95ee854c-ccd7-4292-b874-9ce160fc8988-certs\") pod \"cloudkitty-db-sync-jkwmj\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.723072 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8cd4\" (UniqueName: \"kubernetes.io/projected/95ee854c-ccd7-4292-b874-9ce160fc8988-kube-api-access-l8cd4\") pod \"cloudkitty-db-sync-jkwmj\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.723166 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95ee854c-ccd7-4292-b874-9ce160fc8988-config-data\") pod \"cloudkitty-db-sync-jkwmj\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.723226 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95ee854c-ccd7-4292-b874-9ce160fc8988-combined-ca-bundle\") pod \"cloudkitty-db-sync-jkwmj\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.731384 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95ee854c-ccd7-4292-b874-9ce160fc8988-scripts\") pod \"cloudkitty-db-sync-jkwmj\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.731792 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95ee854c-ccd7-4292-b874-9ce160fc8988-config-data\") pod \"cloudkitty-db-sync-jkwmj\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.731993 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95ee854c-ccd7-4292-b874-9ce160fc8988-combined-ca-bundle\") pod \"cloudkitty-db-sync-jkwmj\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.732285 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/95ee854c-ccd7-4292-b874-9ce160fc8988-certs\") pod \"cloudkitty-db-sync-jkwmj\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.742552 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8cd4\" (UniqueName: \"kubernetes.io/projected/95ee854c-ccd7-4292-b874-9ce160fc8988-kube-api-access-l8cd4\") pod \"cloudkitty-db-sync-jkwmj\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:28:55 crc kubenswrapper[4946]: I1204 15:28:55.812477 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:28:56 crc kubenswrapper[4946]: I1204 15:28:56.335418 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-jkwmj"] Dec 04 15:28:57 crc kubenswrapper[4946]: I1204 15:28:57.071630 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-jkwmj" event={"ID":"95ee854c-ccd7-4292-b874-9ce160fc8988","Type":"ContainerStarted","Data":"cce95cddbce0624e9897efda403a741c4a359436939488d322c41df80ae415d8"} Dec 04 15:28:57 crc kubenswrapper[4946]: I1204 15:28:57.234592 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:28:57 crc kubenswrapper[4946]: I1204 15:28:57.235018 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerName="ceilometer-central-agent" containerID="cri-o://56fed4a121081216984822cb39fd8464c5506d0744654c746283d6e1dcb276f5" gracePeriod=30 Dec 04 15:28:57 crc kubenswrapper[4946]: I1204 15:28:57.235097 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerName="sg-core" containerID="cri-o://ce0ca07982b1a781d7baba4854449f1217660cd72731873e04fa4da365b65edb" gracePeriod=30 Dec 04 15:28:57 crc kubenswrapper[4946]: I1204 15:28:57.235144 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerName="ceilometer-notification-agent" containerID="cri-o://b0c9717788c6e409a4da344bd7b050702b5e979d1c6cb31e5aeb51e6e92b3de7" gracePeriod=30 Dec 04 15:28:57 crc kubenswrapper[4946]: I1204 15:28:57.235517 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerName="proxy-httpd" containerID="cri-o://ad87e480f6111b4ebe14e9167cd01f53e10105da43fd1dbaa73d6a05bd1006a8" gracePeriod=30 Dec 04 15:28:57 crc kubenswrapper[4946]: I1204 15:28:57.392108 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.102780 4946 generic.go:334] "Generic (PLEG): container finished" podID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerID="ad87e480f6111b4ebe14e9167cd01f53e10105da43fd1dbaa73d6a05bd1006a8" exitCode=0 Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.103294 4946 generic.go:334] "Generic (PLEG): container finished" podID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerID="ce0ca07982b1a781d7baba4854449f1217660cd72731873e04fa4da365b65edb" exitCode=2 Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.103309 4946 generic.go:334] "Generic (PLEG): container finished" podID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerID="b0c9717788c6e409a4da344bd7b050702b5e979d1c6cb31e5aeb51e6e92b3de7" exitCode=0 Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.103318 4946 generic.go:334] "Generic (PLEG): container finished" podID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerID="56fed4a121081216984822cb39fd8464c5506d0744654c746283d6e1dcb276f5" exitCode=0 Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.103346 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"8fee5427-4b83-40a3-8f2d-765c2237394d","Type":"ContainerDied","Data":"ad87e480f6111b4ebe14e9167cd01f53e10105da43fd1dbaa73d6a05bd1006a8"} Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.103379 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8fee5427-4b83-40a3-8f2d-765c2237394d","Type":"ContainerDied","Data":"ce0ca07982b1a781d7baba4854449f1217660cd72731873e04fa4da365b65edb"} Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.103393 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8fee5427-4b83-40a3-8f2d-765c2237394d","Type":"ContainerDied","Data":"b0c9717788c6e409a4da344bd7b050702b5e979d1c6cb31e5aeb51e6e92b3de7"} Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.103407 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8fee5427-4b83-40a3-8f2d-765c2237394d","Type":"ContainerDied","Data":"56fed4a121081216984822cb39fd8464c5506d0744654c746283d6e1dcb276f5"} Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.179774 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qjfsf" Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.294548 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qjfsf" Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.464522 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qjfsf"] Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.679520 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.847031 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.950441 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-sg-core-conf-yaml\") pod \"8fee5427-4b83-40a3-8f2d-765c2237394d\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.950539 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-config-data\") pod \"8fee5427-4b83-40a3-8f2d-765c2237394d\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.950592 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8fee5427-4b83-40a3-8f2d-765c2237394d-run-httpd\") pod \"8fee5427-4b83-40a3-8f2d-765c2237394d\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.950678 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-scripts\") pod \"8fee5427-4b83-40a3-8f2d-765c2237394d\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.950865 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-combined-ca-bundle\") pod \"8fee5427-4b83-40a3-8f2d-765c2237394d\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.950904 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-ceilometer-tls-certs\") pod \"8fee5427-4b83-40a3-8f2d-765c2237394d\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.950949 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8fee5427-4b83-40a3-8f2d-765c2237394d-log-httpd\") pod \"8fee5427-4b83-40a3-8f2d-765c2237394d\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.951029 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-627f2\" (UniqueName: \"kubernetes.io/projected/8fee5427-4b83-40a3-8f2d-765c2237394d-kube-api-access-627f2\") pod \"8fee5427-4b83-40a3-8f2d-765c2237394d\" (UID: \"8fee5427-4b83-40a3-8f2d-765c2237394d\") " Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.954355 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fee5427-4b83-40a3-8f2d-765c2237394d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8fee5427-4b83-40a3-8f2d-765c2237394d" (UID: "8fee5427-4b83-40a3-8f2d-765c2237394d"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.956286 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fee5427-4b83-40a3-8f2d-765c2237394d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8fee5427-4b83-40a3-8f2d-765c2237394d" (UID: "8fee5427-4b83-40a3-8f2d-765c2237394d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.966816 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fee5427-4b83-40a3-8f2d-765c2237394d-kube-api-access-627f2" (OuterVolumeSpecName: "kube-api-access-627f2") pod "8fee5427-4b83-40a3-8f2d-765c2237394d" (UID: "8fee5427-4b83-40a3-8f2d-765c2237394d"). InnerVolumeSpecName "kube-api-access-627f2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:28:58 crc kubenswrapper[4946]: I1204 15:28:58.996430 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-scripts" (OuterVolumeSpecName: "scripts") pod "8fee5427-4b83-40a3-8f2d-765c2237394d" (UID: "8fee5427-4b83-40a3-8f2d-765c2237394d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.006366 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8fee5427-4b83-40a3-8f2d-765c2237394d" (UID: "8fee5427-4b83-40a3-8f2d-765c2237394d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.048260 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "8fee5427-4b83-40a3-8f2d-765c2237394d" (UID: "8fee5427-4b83-40a3-8f2d-765c2237394d"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.053529 4946 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8fee5427-4b83-40a3-8f2d-765c2237394d-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.053562 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.053572 4946 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.053584 4946 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8fee5427-4b83-40a3-8f2d-765c2237394d-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.053592 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-627f2\" (UniqueName: \"kubernetes.io/projected/8fee5427-4b83-40a3-8f2d-765c2237394d-kube-api-access-627f2\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.053603 4946 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.080431 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8fee5427-4b83-40a3-8f2d-765c2237394d" (UID: "8fee5427-4b83-40a3-8f2d-765c2237394d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.133897 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.133885 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8fee5427-4b83-40a3-8f2d-765c2237394d","Type":"ContainerDied","Data":"5dc21cd30ab6e7aaa3e80c751080780b58535a66049b86b519eff2b5d4f1ac47"} Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.133976 4946 scope.go:117] "RemoveContainer" containerID="ad87e480f6111b4ebe14e9167cd01f53e10105da43fd1dbaa73d6a05bd1006a8" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.156465 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.197697 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-config-data" (OuterVolumeSpecName: "config-data") pod "8fee5427-4b83-40a3-8f2d-765c2237394d" (UID: "8fee5427-4b83-40a3-8f2d-765c2237394d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.245765 4946 scope.go:117] "RemoveContainer" containerID="ce0ca07982b1a781d7baba4854449f1217660cd72731873e04fa4da365b65edb" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.258666 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fee5427-4b83-40a3-8f2d-765c2237394d-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.275276 4946 scope.go:117] "RemoveContainer" containerID="b0c9717788c6e409a4da344bd7b050702b5e979d1c6cb31e5aeb51e6e92b3de7" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.323204 4946 scope.go:117] "RemoveContainer" containerID="56fed4a121081216984822cb39fd8464c5506d0744654c746283d6e1dcb276f5" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.488833 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.501824 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.515067 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:28:59 crc kubenswrapper[4946]: E1204 15:28:59.515607 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerName="proxy-httpd" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.515625 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerName="proxy-httpd" Dec 04 15:28:59 crc kubenswrapper[4946]: E1204 15:28:59.515651 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerName="ceilometer-notification-agent" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.515658 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerName="ceilometer-notification-agent" Dec 04 15:28:59 crc kubenswrapper[4946]: E1204 15:28:59.515678 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerName="ceilometer-central-agent" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.515689 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerName="ceilometer-central-agent" Dec 04 15:28:59 crc kubenswrapper[4946]: E1204 15:28:59.515702 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerName="sg-core" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.515709 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerName="sg-core" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.515931 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerName="ceilometer-notification-agent" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.515961 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerName="ceilometer-central-agent" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.515978 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerName="proxy-httpd" Dec 04 15:28:59 crc 
kubenswrapper[4946]: I1204 15:28:59.515993 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fee5427-4b83-40a3-8f2d-765c2237394d" containerName="sg-core" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.518092 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.521059 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.521891 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.522089 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.539734 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.668200 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/655a0ab4-533d-4447-8656-72742f94f4a7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.668267 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/655a0ab4-533d-4447-8656-72742f94f4a7-scripts\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.668294 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655a0ab4-533d-4447-8656-72742f94f4a7-config-data\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.668361 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/655a0ab4-533d-4447-8656-72742f94f4a7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.668394 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655a0ab4-533d-4447-8656-72742f94f4a7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.668421 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/655a0ab4-533d-4447-8656-72742f94f4a7-run-httpd\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.668448 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/655a0ab4-533d-4447-8656-72742f94f4a7-log-httpd\") pod \"ceilometer-0\" (UID: 
\"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.668509 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nc67b\" (UniqueName: \"kubernetes.io/projected/655a0ab4-533d-4447-8656-72742f94f4a7-kube-api-access-nc67b\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.770911 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nc67b\" (UniqueName: \"kubernetes.io/projected/655a0ab4-533d-4447-8656-72742f94f4a7-kube-api-access-nc67b\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.771094 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/655a0ab4-533d-4447-8656-72742f94f4a7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.772057 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/655a0ab4-533d-4447-8656-72742f94f4a7-scripts\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.772107 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655a0ab4-533d-4447-8656-72742f94f4a7-config-data\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.772327 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/655a0ab4-533d-4447-8656-72742f94f4a7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.772417 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655a0ab4-533d-4447-8656-72742f94f4a7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.772477 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/655a0ab4-533d-4447-8656-72742f94f4a7-run-httpd\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.772549 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/655a0ab4-533d-4447-8656-72742f94f4a7-log-httpd\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.773110 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/655a0ab4-533d-4447-8656-72742f94f4a7-log-httpd\") pod 
\"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.773500 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/655a0ab4-533d-4447-8656-72742f94f4a7-run-httpd\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.780386 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655a0ab4-533d-4447-8656-72742f94f4a7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.780661 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655a0ab4-533d-4447-8656-72742f94f4a7-config-data\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.781137 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/655a0ab4-533d-4447-8656-72742f94f4a7-scripts\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.781663 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/655a0ab4-533d-4447-8656-72742f94f4a7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.790535 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/655a0ab4-533d-4447-8656-72742f94f4a7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.798020 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nc67b\" (UniqueName: \"kubernetes.io/projected/655a0ab4-533d-4447-8656-72742f94f4a7-kube-api-access-nc67b\") pod \"ceilometer-0\" (UID: \"655a0ab4-533d-4447-8656-72742f94f4a7\") " pod="openstack/ceilometer-0" Dec 04 15:28:59 crc kubenswrapper[4946]: I1204 15:28:59.842289 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 15:29:00 crc kubenswrapper[4946]: I1204 15:29:00.159609 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qjfsf" podUID="51d0224f-2d30-47b7-91fd-78e66a782ee9" containerName="registry-server" containerID="cri-o://7cb49a1eed2ce48c3001bc46bc73c43c1bce6a8fafc99ca76b859bc769472935" gracePeriod=2 Dec 04 15:29:00 crc kubenswrapper[4946]: I1204 15:29:00.496573 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 15:29:00 crc kubenswrapper[4946]: I1204 15:29:00.870593 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qjfsf" Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.004794 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxrrz\" (UniqueName: \"kubernetes.io/projected/51d0224f-2d30-47b7-91fd-78e66a782ee9-kube-api-access-zxrrz\") pod \"51d0224f-2d30-47b7-91fd-78e66a782ee9\" (UID: \"51d0224f-2d30-47b7-91fd-78e66a782ee9\") " Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.004972 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51d0224f-2d30-47b7-91fd-78e66a782ee9-utilities\") pod \"51d0224f-2d30-47b7-91fd-78e66a782ee9\" (UID: \"51d0224f-2d30-47b7-91fd-78e66a782ee9\") " Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.005039 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51d0224f-2d30-47b7-91fd-78e66a782ee9-catalog-content\") pod \"51d0224f-2d30-47b7-91fd-78e66a782ee9\" (UID: \"51d0224f-2d30-47b7-91fd-78e66a782ee9\") " Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.007862 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51d0224f-2d30-47b7-91fd-78e66a782ee9-utilities" (OuterVolumeSpecName: "utilities") pod "51d0224f-2d30-47b7-91fd-78e66a782ee9" (UID: "51d0224f-2d30-47b7-91fd-78e66a782ee9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.030320 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51d0224f-2d30-47b7-91fd-78e66a782ee9-kube-api-access-zxrrz" (OuterVolumeSpecName: "kube-api-access-zxrrz") pod "51d0224f-2d30-47b7-91fd-78e66a782ee9" (UID: "51d0224f-2d30-47b7-91fd-78e66a782ee9"). InnerVolumeSpecName "kube-api-access-zxrrz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.084343 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51d0224f-2d30-47b7-91fd-78e66a782ee9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "51d0224f-2d30-47b7-91fd-78e66a782ee9" (UID: "51d0224f-2d30-47b7-91fd-78e66a782ee9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.107881 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxrrz\" (UniqueName: \"kubernetes.io/projected/51d0224f-2d30-47b7-91fd-78e66a782ee9-kube-api-access-zxrrz\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.107916 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51d0224f-2d30-47b7-91fd-78e66a782ee9-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.107927 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51d0224f-2d30-47b7-91fd-78e66a782ee9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.181478 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"655a0ab4-533d-4447-8656-72742f94f4a7","Type":"ContainerStarted","Data":"f8193fd16b1ae92fe49742bcf6461784f6615c8f7632f5d8fa3b21c83319db9f"} Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.186763 4946 generic.go:334] "Generic (PLEG): container finished" podID="51d0224f-2d30-47b7-91fd-78e66a782ee9" containerID="7cb49a1eed2ce48c3001bc46bc73c43c1bce6a8fafc99ca76b859bc769472935" exitCode=0 Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.186829 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qjfsf" event={"ID":"51d0224f-2d30-47b7-91fd-78e66a782ee9","Type":"ContainerDied","Data":"7cb49a1eed2ce48c3001bc46bc73c43c1bce6a8fafc99ca76b859bc769472935"} Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.186885 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qjfsf" event={"ID":"51d0224f-2d30-47b7-91fd-78e66a782ee9","Type":"ContainerDied","Data":"dc7f1984c65afa658cce80a6d5d32f0fcd26f65003f98b30daa91b5722c1313d"} Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.186920 4946 scope.go:117] "RemoveContainer" containerID="7cb49a1eed2ce48c3001bc46bc73c43c1bce6a8fafc99ca76b859bc769472935" Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.187160 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qjfsf" Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.244360 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qjfsf"] Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.263320 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qjfsf"] Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.273249 4946 scope.go:117] "RemoveContainer" containerID="57c482431710f40cf01c0bf01912e30690faa5836f32054ac7e94c9725a4e9e7" Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.409083 4946 scope.go:117] "RemoveContainer" containerID="007cfa3c1c272ba146a2815368d2a8ed5c0405c48cb70f339240e9b7be78193a" Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.484201 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51d0224f-2d30-47b7-91fd-78e66a782ee9" path="/var/lib/kubelet/pods/51d0224f-2d30-47b7-91fd-78e66a782ee9/volumes" Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.485981 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fee5427-4b83-40a3-8f2d-765c2237394d" path="/var/lib/kubelet/pods/8fee5427-4b83-40a3-8f2d-765c2237394d/volumes" Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.519054 4946 scope.go:117] "RemoveContainer" containerID="7cb49a1eed2ce48c3001bc46bc73c43c1bce6a8fafc99ca76b859bc769472935" Dec 04 15:29:01 crc kubenswrapper[4946]: E1204 15:29:01.519491 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cb49a1eed2ce48c3001bc46bc73c43c1bce6a8fafc99ca76b859bc769472935\": container with ID starting with 7cb49a1eed2ce48c3001bc46bc73c43c1bce6a8fafc99ca76b859bc769472935 not found: ID does not exist" containerID="7cb49a1eed2ce48c3001bc46bc73c43c1bce6a8fafc99ca76b859bc769472935" Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.519546 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cb49a1eed2ce48c3001bc46bc73c43c1bce6a8fafc99ca76b859bc769472935"} err="failed to get container status \"7cb49a1eed2ce48c3001bc46bc73c43c1bce6a8fafc99ca76b859bc769472935\": rpc error: code = NotFound desc = could not find container \"7cb49a1eed2ce48c3001bc46bc73c43c1bce6a8fafc99ca76b859bc769472935\": container with ID starting with 7cb49a1eed2ce48c3001bc46bc73c43c1bce6a8fafc99ca76b859bc769472935 not found: ID does not exist" Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.519582 4946 scope.go:117] "RemoveContainer" containerID="57c482431710f40cf01c0bf01912e30690faa5836f32054ac7e94c9725a4e9e7" Dec 04 15:29:01 crc kubenswrapper[4946]: E1204 15:29:01.520393 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57c482431710f40cf01c0bf01912e30690faa5836f32054ac7e94c9725a4e9e7\": container with ID starting with 57c482431710f40cf01c0bf01912e30690faa5836f32054ac7e94c9725a4e9e7 not found: ID does not exist" containerID="57c482431710f40cf01c0bf01912e30690faa5836f32054ac7e94c9725a4e9e7" Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.520421 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57c482431710f40cf01c0bf01912e30690faa5836f32054ac7e94c9725a4e9e7"} err="failed to get container status \"57c482431710f40cf01c0bf01912e30690faa5836f32054ac7e94c9725a4e9e7\": rpc error: code = NotFound desc = could not find 
container \"57c482431710f40cf01c0bf01912e30690faa5836f32054ac7e94c9725a4e9e7\": container with ID starting with 57c482431710f40cf01c0bf01912e30690faa5836f32054ac7e94c9725a4e9e7 not found: ID does not exist" Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.520441 4946 scope.go:117] "RemoveContainer" containerID="007cfa3c1c272ba146a2815368d2a8ed5c0405c48cb70f339240e9b7be78193a" Dec 04 15:29:01 crc kubenswrapper[4946]: E1204 15:29:01.520860 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"007cfa3c1c272ba146a2815368d2a8ed5c0405c48cb70f339240e9b7be78193a\": container with ID starting with 007cfa3c1c272ba146a2815368d2a8ed5c0405c48cb70f339240e9b7be78193a not found: ID does not exist" containerID="007cfa3c1c272ba146a2815368d2a8ed5c0405c48cb70f339240e9b7be78193a" Dec 04 15:29:01 crc kubenswrapper[4946]: I1204 15:29:01.520882 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"007cfa3c1c272ba146a2815368d2a8ed5c0405c48cb70f339240e9b7be78193a"} err="failed to get container status \"007cfa3c1c272ba146a2815368d2a8ed5c0405c48cb70f339240e9b7be78193a\": rpc error: code = NotFound desc = could not find container \"007cfa3c1c272ba146a2815368d2a8ed5c0405c48cb70f339240e9b7be78193a\": container with ID starting with 007cfa3c1c272ba146a2815368d2a8ed5c0405c48cb70f339240e9b7be78193a not found: ID does not exist" Dec 04 15:29:02 crc kubenswrapper[4946]: I1204 15:29:02.444373 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="920eb4d8-3aa1-4141-9f65-647e275405e4" containerName="rabbitmq" containerID="cri-o://61250f67537c021bfba802c2cf1e24e1c4b4ad54a1378fe5f316a10c39333c42" gracePeriod=604795 Dec 04 15:29:02 crc kubenswrapper[4946]: I1204 15:29:02.870454 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vb4zl"] Dec 04 15:29:02 crc kubenswrapper[4946]: E1204 15:29:02.871614 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51d0224f-2d30-47b7-91fd-78e66a782ee9" containerName="extract-content" Dec 04 15:29:02 crc kubenswrapper[4946]: I1204 15:29:02.871637 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="51d0224f-2d30-47b7-91fd-78e66a782ee9" containerName="extract-content" Dec 04 15:29:02 crc kubenswrapper[4946]: E1204 15:29:02.871695 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51d0224f-2d30-47b7-91fd-78e66a782ee9" containerName="registry-server" Dec 04 15:29:02 crc kubenswrapper[4946]: I1204 15:29:02.871706 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="51d0224f-2d30-47b7-91fd-78e66a782ee9" containerName="registry-server" Dec 04 15:29:02 crc kubenswrapper[4946]: E1204 15:29:02.871718 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51d0224f-2d30-47b7-91fd-78e66a782ee9" containerName="extract-utilities" Dec 04 15:29:02 crc kubenswrapper[4946]: I1204 15:29:02.871727 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="51d0224f-2d30-47b7-91fd-78e66a782ee9" containerName="extract-utilities" Dec 04 15:29:02 crc kubenswrapper[4946]: I1204 15:29:02.871995 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="51d0224f-2d30-47b7-91fd-78e66a782ee9" containerName="registry-server" Dec 04 15:29:02 crc kubenswrapper[4946]: I1204 15:29:02.873982 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vb4zl" Dec 04 15:29:02 crc kubenswrapper[4946]: I1204 15:29:02.887875 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vb4zl"] Dec 04 15:29:02 crc kubenswrapper[4946]: I1204 15:29:02.954824 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wg65m\" (UniqueName: \"kubernetes.io/projected/f7735402-ad72-4b1b-8028-f0a78d9bff4d-kube-api-access-wg65m\") pod \"community-operators-vb4zl\" (UID: \"f7735402-ad72-4b1b-8028-f0a78d9bff4d\") " pod="openshift-marketplace/community-operators-vb4zl" Dec 04 15:29:02 crc kubenswrapper[4946]: I1204 15:29:02.954909 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7735402-ad72-4b1b-8028-f0a78d9bff4d-catalog-content\") pod \"community-operators-vb4zl\" (UID: \"f7735402-ad72-4b1b-8028-f0a78d9bff4d\") " pod="openshift-marketplace/community-operators-vb4zl" Dec 04 15:29:02 crc kubenswrapper[4946]: I1204 15:29:02.954951 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7735402-ad72-4b1b-8028-f0a78d9bff4d-utilities\") pod \"community-operators-vb4zl\" (UID: \"f7735402-ad72-4b1b-8028-f0a78d9bff4d\") " pod="openshift-marketplace/community-operators-vb4zl" Dec 04 15:29:03 crc kubenswrapper[4946]: I1204 15:29:03.057069 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wg65m\" (UniqueName: \"kubernetes.io/projected/f7735402-ad72-4b1b-8028-f0a78d9bff4d-kube-api-access-wg65m\") pod \"community-operators-vb4zl\" (UID: \"f7735402-ad72-4b1b-8028-f0a78d9bff4d\") " pod="openshift-marketplace/community-operators-vb4zl" Dec 04 15:29:03 crc kubenswrapper[4946]: I1204 15:29:03.057188 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7735402-ad72-4b1b-8028-f0a78d9bff4d-catalog-content\") pod \"community-operators-vb4zl\" (UID: \"f7735402-ad72-4b1b-8028-f0a78d9bff4d\") " pod="openshift-marketplace/community-operators-vb4zl" Dec 04 15:29:03 crc kubenswrapper[4946]: I1204 15:29:03.057242 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7735402-ad72-4b1b-8028-f0a78d9bff4d-utilities\") pod \"community-operators-vb4zl\" (UID: \"f7735402-ad72-4b1b-8028-f0a78d9bff4d\") " pod="openshift-marketplace/community-operators-vb4zl" Dec 04 15:29:03 crc kubenswrapper[4946]: I1204 15:29:03.057818 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7735402-ad72-4b1b-8028-f0a78d9bff4d-catalog-content\") pod \"community-operators-vb4zl\" (UID: \"f7735402-ad72-4b1b-8028-f0a78d9bff4d\") " pod="openshift-marketplace/community-operators-vb4zl" Dec 04 15:29:03 crc kubenswrapper[4946]: I1204 15:29:03.057873 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7735402-ad72-4b1b-8028-f0a78d9bff4d-utilities\") pod \"community-operators-vb4zl\" (UID: \"f7735402-ad72-4b1b-8028-f0a78d9bff4d\") " pod="openshift-marketplace/community-operators-vb4zl" Dec 04 15:29:03 crc kubenswrapper[4946]: I1204 15:29:03.076907 4946 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wg65m\" (UniqueName: \"kubernetes.io/projected/f7735402-ad72-4b1b-8028-f0a78d9bff4d-kube-api-access-wg65m\") pod \"community-operators-vb4zl\" (UID: \"f7735402-ad72-4b1b-8028-f0a78d9bff4d\") " pod="openshift-marketplace/community-operators-vb4zl" Dec 04 15:29:03 crc kubenswrapper[4946]: I1204 15:29:03.205216 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vb4zl" Dec 04 15:29:03 crc kubenswrapper[4946]: I1204 15:29:03.842309 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vb4zl"] Dec 04 15:29:03 crc kubenswrapper[4946]: W1204 15:29:03.855695 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7735402_ad72_4b1b_8028_f0a78d9bff4d.slice/crio-a43f299d03b22e74aaa4ff30c1eda2b57bb2ae5542ea122f53aa2c90fee484e8 WatchSource:0}: Error finding container a43f299d03b22e74aaa4ff30c1eda2b57bb2ae5542ea122f53aa2c90fee484e8: Status 404 returned error can't find the container with id a43f299d03b22e74aaa4ff30c1eda2b57bb2ae5542ea122f53aa2c90fee484e8 Dec 04 15:29:04 crc kubenswrapper[4946]: I1204 15:29:04.233267 4946 generic.go:334] "Generic (PLEG): container finished" podID="f7735402-ad72-4b1b-8028-f0a78d9bff4d" containerID="9a60eee3f3ab381ab2617864f2854400c38a784e9c2761ba66c69fc06157363a" exitCode=0 Dec 04 15:29:04 crc kubenswrapper[4946]: I1204 15:29:04.233320 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vb4zl" event={"ID":"f7735402-ad72-4b1b-8028-f0a78d9bff4d","Type":"ContainerDied","Data":"9a60eee3f3ab381ab2617864f2854400c38a784e9c2761ba66c69fc06157363a"} Dec 04 15:29:04 crc kubenswrapper[4946]: I1204 15:29:04.233368 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vb4zl" event={"ID":"f7735402-ad72-4b1b-8028-f0a78d9bff4d","Type":"ContainerStarted","Data":"a43f299d03b22e74aaa4ff30c1eda2b57bb2ae5542ea122f53aa2c90fee484e8"} Dec 04 15:29:04 crc kubenswrapper[4946]: I1204 15:29:04.380966 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="f65583d1-046b-463a-9101-2074072a94f0" containerName="rabbitmq" containerID="cri-o://793aaf1a1a7437e7e5a4a080f87f02b82e6a64f68c3b9c9cfcb825f04c561997" gracePeriod=604795 Dec 04 15:29:06 crc kubenswrapper[4946]: I1204 15:29:06.987995 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="920eb4d8-3aa1-4141-9f65-647e275405e4" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.108:5671: connect: connection refused" Dec 04 15:29:07 crc kubenswrapper[4946]: I1204 15:29:07.741087 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="f65583d1-046b-463a-9101-2074072a94f0" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.109:5671: connect: connection refused" Dec 04 15:29:10 crc kubenswrapper[4946]: I1204 15:29:10.308251 4946 generic.go:334] "Generic (PLEG): container finished" podID="920eb4d8-3aa1-4141-9f65-647e275405e4" containerID="61250f67537c021bfba802c2cf1e24e1c4b4ad54a1378fe5f316a10c39333c42" exitCode=0 Dec 04 15:29:10 crc kubenswrapper[4946]: I1204 15:29:10.308331 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"920eb4d8-3aa1-4141-9f65-647e275405e4","Type":"ContainerDied","Data":"61250f67537c021bfba802c2cf1e24e1c4b4ad54a1378fe5f316a10c39333c42"} Dec 04 15:29:11 crc kubenswrapper[4946]: I1204 15:29:11.324523 4946 generic.go:334] "Generic (PLEG): container finished" podID="f65583d1-046b-463a-9101-2074072a94f0" containerID="793aaf1a1a7437e7e5a4a080f87f02b82e6a64f68c3b9c9cfcb825f04c561997" exitCode=0 Dec 04 15:29:11 crc kubenswrapper[4946]: I1204 15:29:11.324594 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f65583d1-046b-463a-9101-2074072a94f0","Type":"ContainerDied","Data":"793aaf1a1a7437e7e5a4a080f87f02b82e6a64f68c3b9c9cfcb825f04c561997"} Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.550420 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-sl69k"] Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.552439 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.556521 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.580031 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-sl69k"] Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.593106 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-dns-svc\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.593220 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfhzl\" (UniqueName: \"kubernetes.io/projected/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-kube-api-access-lfhzl\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.593304 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-dns-swift-storage-0\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.593333 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-openstack-edpm-ipam\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.593392 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-config\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.593427 4946 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-ovsdbserver-sb\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.593449 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-ovsdbserver-nb\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.696679 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfhzl\" (UniqueName: \"kubernetes.io/projected/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-kube-api-access-lfhzl\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.696839 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-dns-swift-storage-0\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.696869 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-openstack-edpm-ipam\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.696949 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-config\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.697015 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-ovsdbserver-sb\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.697047 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-ovsdbserver-nb\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.697163 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-dns-svc\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.698154 4946 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-openstack-edpm-ipam\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.698247 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-dns-svc\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.699439 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-dns-swift-storage-0\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.699665 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-ovsdbserver-sb\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.700438 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-ovsdbserver-nb\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.705853 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-config\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.723256 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfhzl\" (UniqueName: \"kubernetes.io/projected/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-kube-api-access-lfhzl\") pod \"dnsmasq-dns-dbb88bf8c-sl69k\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:12 crc kubenswrapper[4946]: I1204 15:29:12.877105 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.307869 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.403467 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-plugins\") pod \"920eb4d8-3aa1-4141-9f65-647e275405e4\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.403636 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/920eb4d8-3aa1-4141-9f65-647e275405e4-plugins-conf\") pod \"920eb4d8-3aa1-4141-9f65-647e275405e4\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.403702 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/920eb4d8-3aa1-4141-9f65-647e275405e4-config-data\") pod \"920eb4d8-3aa1-4141-9f65-647e275405e4\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.404591 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8510755d-2baa-400e-9d96-253271d5105a\") pod \"920eb4d8-3aa1-4141-9f65-647e275405e4\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.404776 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/920eb4d8-3aa1-4141-9f65-647e275405e4-server-conf\") pod \"920eb4d8-3aa1-4141-9f65-647e275405e4\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.404866 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/920eb4d8-3aa1-4141-9f65-647e275405e4-erlang-cookie-secret\") pod \"920eb4d8-3aa1-4141-9f65-647e275405e4\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.404968 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/920eb4d8-3aa1-4141-9f65-647e275405e4-pod-info\") pod \"920eb4d8-3aa1-4141-9f65-647e275405e4\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.405041 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "920eb4d8-3aa1-4141-9f65-647e275405e4" (UID: "920eb4d8-3aa1-4141-9f65-647e275405e4"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.405104 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-confd\") pod \"920eb4d8-3aa1-4141-9f65-647e275405e4\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.405243 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-erlang-cookie\") pod \"920eb4d8-3aa1-4141-9f65-647e275405e4\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.405384 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-tls\") pod \"920eb4d8-3aa1-4141-9f65-647e275405e4\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.405428 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xl67j\" (UniqueName: \"kubernetes.io/projected/920eb4d8-3aa1-4141-9f65-647e275405e4-kube-api-access-xl67j\") pod \"920eb4d8-3aa1-4141-9f65-647e275405e4\" (UID: \"920eb4d8-3aa1-4141-9f65-647e275405e4\") " Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.406929 4946 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.408490 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "920eb4d8-3aa1-4141-9f65-647e275405e4" (UID: "920eb4d8-3aa1-4141-9f65-647e275405e4"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.408610 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/920eb4d8-3aa1-4141-9f65-647e275405e4-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "920eb4d8-3aa1-4141-9f65-647e275405e4" (UID: "920eb4d8-3aa1-4141-9f65-647e275405e4"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.419802 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/920eb4d8-3aa1-4141-9f65-647e275405e4-pod-info" (OuterVolumeSpecName: "pod-info") pod "920eb4d8-3aa1-4141-9f65-647e275405e4" (UID: "920eb4d8-3aa1-4141-9f65-647e275405e4"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.420764 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/920eb4d8-3aa1-4141-9f65-647e275405e4-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "920eb4d8-3aa1-4141-9f65-647e275405e4" (UID: "920eb4d8-3aa1-4141-9f65-647e275405e4"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.421533 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "920eb4d8-3aa1-4141-9f65-647e275405e4" (UID: "920eb4d8-3aa1-4141-9f65-647e275405e4"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.425006 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/920eb4d8-3aa1-4141-9f65-647e275405e4-kube-api-access-xl67j" (OuterVolumeSpecName: "kube-api-access-xl67j") pod "920eb4d8-3aa1-4141-9f65-647e275405e4" (UID: "920eb4d8-3aa1-4141-9f65-647e275405e4"). InnerVolumeSpecName "kube-api-access-xl67j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.455811 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8510755d-2baa-400e-9d96-253271d5105a" (OuterVolumeSpecName: "persistence") pod "920eb4d8-3aa1-4141-9f65-647e275405e4" (UID: "920eb4d8-3aa1-4141-9f65-647e275405e4"). InnerVolumeSpecName "pvc-8510755d-2baa-400e-9d96-253271d5105a". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.497913 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/920eb4d8-3aa1-4141-9f65-647e275405e4-config-data" (OuterVolumeSpecName: "config-data") pod "920eb4d8-3aa1-4141-9f65-647e275405e4" (UID: "920eb4d8-3aa1-4141-9f65-647e275405e4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.522005 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xl67j\" (UniqueName: \"kubernetes.io/projected/920eb4d8-3aa1-4141-9f65-647e275405e4-kube-api-access-xl67j\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.522042 4946 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.522054 4946 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/920eb4d8-3aa1-4141-9f65-647e275405e4-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.522064 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/920eb4d8-3aa1-4141-9f65-647e275405e4-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.522091 4946 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-8510755d-2baa-400e-9d96-253271d5105a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8510755d-2baa-400e-9d96-253271d5105a\") on node \"crc\" " Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.522102 4946 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/920eb4d8-3aa1-4141-9f65-647e275405e4-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.522127 4946 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/920eb4d8-3aa1-4141-9f65-647e275405e4-pod-info\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.522137 4946 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.532620 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/920eb4d8-3aa1-4141-9f65-647e275405e4-server-conf" (OuterVolumeSpecName: "server-conf") pod "920eb4d8-3aa1-4141-9f65-647e275405e4" (UID: "920eb4d8-3aa1-4141-9f65-647e275405e4"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.568697 4946 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.568862 4946 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-8510755d-2baa-400e-9d96-253271d5105a" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8510755d-2baa-400e-9d96-253271d5105a") on node "crc" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.569098 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.626368 4946 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/920eb4d8-3aa1-4141-9f65-647e275405e4-server-conf\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.626406 4946 reconciler_common.go:293] "Volume detached for volume \"pvc-8510755d-2baa-400e-9d96-253271d5105a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8510755d-2baa-400e-9d96-253271d5105a\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.629269 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"920eb4d8-3aa1-4141-9f65-647e275405e4","Type":"ContainerDied","Data":"270e8a017ba79e02cdf840f16e87e8cd6aa422619997e0f9261c49ed12f88f11"} Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.629323 4946 scope.go:117] "RemoveContainer" containerID="61250f67537c021bfba802c2cf1e24e1c4b4ad54a1378fe5f316a10c39333c42" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.921003 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "920eb4d8-3aa1-4141-9f65-647e275405e4" (UID: "920eb4d8-3aa1-4141-9f65-647e275405e4"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:29:17 crc kubenswrapper[4946]: I1204 15:29:17.934274 4946 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/920eb4d8-3aa1-4141-9f65-647e275405e4-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.276232 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.285459 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.313003 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 04 15:29:18 crc kubenswrapper[4946]: E1204 15:29:18.313817 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="920eb4d8-3aa1-4141-9f65-647e275405e4" containerName="rabbitmq" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.313839 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="920eb4d8-3aa1-4141-9f65-647e275405e4" containerName="rabbitmq" Dec 04 15:29:18 crc kubenswrapper[4946]: E1204 15:29:18.313895 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="920eb4d8-3aa1-4141-9f65-647e275405e4" containerName="setup-container" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.313903 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="920eb4d8-3aa1-4141-9f65-647e275405e4" containerName="setup-container" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.314166 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="920eb4d8-3aa1-4141-9f65-647e275405e4" containerName="rabbitmq" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.315901 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.319550 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.319733 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.319611 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.319665 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.319703 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.319700 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-9dtg8" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.320496 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.323577 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.446819 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.446942 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.447034 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.447064 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tn65v\" (UniqueName: \"kubernetes.io/projected/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-kube-api-access-tn65v\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.447218 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.447253 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.447315 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8510755d-2baa-400e-9d96-253271d5105a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8510755d-2baa-400e-9d96-253271d5105a\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.447345 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.447388 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.447441 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-config-data\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.447458 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.549511 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tn65v\" (UniqueName: \"kubernetes.io/projected/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-kube-api-access-tn65v\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.549841 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.549949 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.550090 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8510755d-2baa-400e-9d96-253271d5105a\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8510755d-2baa-400e-9d96-253271d5105a\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.550346 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.550522 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.550714 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-config-data\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.550840 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.551058 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.551667 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-config-data\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.551731 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.553758 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.558514 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8510755d-2baa-400e-9d96-253271d5105a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8510755d-2baa-400e-9d96-253271d5105a\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/8ca06f9d5466f4e0c9ad816f0448639905c6d9d82260c4835e3d42492d756057/globalmount\"" pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.555802 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.556553 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.557971 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.556532 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.558810 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.559027 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.559182 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.560197 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.568868 4946 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.573148 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tn65v\" (UniqueName: \"kubernetes.io/projected/f75f35c6-b58d-471d-9b5e-2d402f3ce92f-kube-api-access-tn65v\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.636744 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8510755d-2baa-400e-9d96-253271d5105a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8510755d-2baa-400e-9d96-253271d5105a\") pod \"rabbitmq-server-0\" (UID: \"f75f35c6-b58d-471d-9b5e-2d402f3ce92f\") " pod="openstack/rabbitmq-server-0" Dec 04 15:29:18 crc kubenswrapper[4946]: I1204 15:29:18.937560 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 04 15:29:19 crc kubenswrapper[4946]: I1204 15:29:19.495161 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="920eb4d8-3aa1-4141-9f65-647e275405e4" path="/var/lib/kubelet/pods/920eb4d8-3aa1-4141-9f65-647e275405e4/volumes" Dec 04 15:29:21 crc kubenswrapper[4946]: I1204 15:29:21.988173 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="920eb4d8-3aa1-4141-9f65-647e275405e4" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.108:5671: i/o timeout" Dec 04 15:29:22 crc kubenswrapper[4946]: I1204 15:29:22.478962 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:29:22 crc kubenswrapper[4946]: I1204 15:29:22.479038 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:29:22 crc kubenswrapper[4946]: I1204 15:29:22.741458 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="f65583d1-046b-463a-9101-2074072a94f0" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.109:5671: i/o timeout" Dec 04 15:29:23 crc kubenswrapper[4946]: E1204 15:29:23.820728 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested" Dec 04 15:29:23 crc kubenswrapper[4946]: E1204 15:29:23.821132 4946 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested" Dec 04 15:29:23 crc kubenswrapper[4946]: E1204 15:29:23.821286 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:cloudkitty-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CloudKittyPassword,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:CloudKittyPassword,Optional:nil,},},},EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:cloudkitty-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:certs,ReadOnly:true,MountPath:/var/lib/openstack/loki-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l8cd4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42406,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-db-sync-jkwmj_openstack(95ee854c-ccd7-4292-b874-9ce160fc8988): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 15:29:23 crc kubenswrapper[4946]: E1204 15:29:23.822468 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cloudkitty-db-sync-jkwmj" podUID="95ee854c-ccd7-4292-b874-9ce160fc8988" Dec 04 15:29:23 crc kubenswrapper[4946]: I1204 15:29:23.886097 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:29:23 crc kubenswrapper[4946]: I1204 15:29:23.953464 4946 util.go:48] "No ready sandbox for pod can be found. 
Dec 04 15:29:23 crc kubenswrapper[4946]: I1204 15:29:23.953464 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:23 crc kubenswrapper[4946]: I1204 15:29:23.954233 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f65583d1-046b-463a-9101-2074072a94f0","Type":"ContainerDied","Data":"d9d2d64d0095b5d509e0b3273ae122061e1b9b7af868b86d24864ef63a256202"}
Dec 04 15:29:23 crc kubenswrapper[4946]: E1204 15:29:23.955700 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested\\\"\"" pod="openstack/cloudkitty-db-sync-jkwmj" podUID="95ee854c-ccd7-4292-b874-9ce160fc8988"
Dec 04 15:29:23 crc kubenswrapper[4946]: I1204 15:29:23.992280 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f65583d1-046b-463a-9101-2074072a94f0-config-data\") pod \"f65583d1-046b-463a-9101-2074072a94f0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") "
Dec 04 15:29:23 crc kubenswrapper[4946]: I1204 15:29:23.992398 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f65583d1-046b-463a-9101-2074072a94f0-server-conf\") pod \"f65583d1-046b-463a-9101-2074072a94f0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") "
Dec 04 15:29:23 crc kubenswrapper[4946]: I1204 15:29:23.992492 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f65583d1-046b-463a-9101-2074072a94f0-erlang-cookie-secret\") pod \"f65583d1-046b-463a-9101-2074072a94f0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") "
Dec 04 15:29:23 crc kubenswrapper[4946]: I1204 15:29:23.992558 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f65583d1-046b-463a-9101-2074072a94f0-pod-info\") pod \"f65583d1-046b-463a-9101-2074072a94f0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") "
Dec 04 15:29:23 crc kubenswrapper[4946]: I1204 15:29:23.992589 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f65583d1-046b-463a-9101-2074072a94f0-plugins-conf\") pod \"f65583d1-046b-463a-9101-2074072a94f0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") "
Dec 04 15:29:23 crc kubenswrapper[4946]: I1204 15:29:23.992690 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-erlang-cookie\") pod \"f65583d1-046b-463a-9101-2074072a94f0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") "
Dec 04 15:29:23 crc kubenswrapper[4946]: I1204 15:29:23.992722 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxdz4\" (UniqueName: \"kubernetes.io/projected/f65583d1-046b-463a-9101-2074072a94f0-kube-api-access-kxdz4\") pod \"f65583d1-046b-463a-9101-2074072a94f0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") "
Dec 04 15:29:23 crc kubenswrapper[4946]: I1204 15:29:23.992744 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-confd\") pod \"f65583d1-046b-463a-9101-2074072a94f0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") "
Dec 04 15:29:23 crc kubenswrapper[4946]: I1204 15:29:23.993415 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\") pod \"f65583d1-046b-463a-9101-2074072a94f0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") "
Dec 04 15:29:23 crc kubenswrapper[4946]: I1204 15:29:23.993497 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-tls\") pod \"f65583d1-046b-463a-9101-2074072a94f0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") "
Dec 04 15:29:23 crc kubenswrapper[4946]: I1204 15:29:23.993576 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-plugins\") pod \"f65583d1-046b-463a-9101-2074072a94f0\" (UID: \"f65583d1-046b-463a-9101-2074072a94f0\") "
Dec 04 15:29:23 crc kubenswrapper[4946]: I1204 15:29:23.995162 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "f65583d1-046b-463a-9101-2074072a94f0" (UID: "f65583d1-046b-463a-9101-2074072a94f0"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:23.998207 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f65583d1-046b-463a-9101-2074072a94f0-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "f65583d1-046b-463a-9101-2074072a94f0" (UID: "f65583d1-046b-463a-9101-2074072a94f0"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:23.999018 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "f65583d1-046b-463a-9101-2074072a94f0" (UID: "f65583d1-046b-463a-9101-2074072a94f0"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.015177 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f65583d1-046b-463a-9101-2074072a94f0-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "f65583d1-046b-463a-9101-2074072a94f0" (UID: "f65583d1-046b-463a-9101-2074072a94f0"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.015417 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "f65583d1-046b-463a-9101-2074072a94f0" (UID: "f65583d1-046b-463a-9101-2074072a94f0"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.017291 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f65583d1-046b-463a-9101-2074072a94f0-kube-api-access-kxdz4" (OuterVolumeSpecName: "kube-api-access-kxdz4") pod "f65583d1-046b-463a-9101-2074072a94f0" (UID: "f65583d1-046b-463a-9101-2074072a94f0"). InnerVolumeSpecName "kube-api-access-kxdz4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.070276 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/f65583d1-046b-463a-9101-2074072a94f0-pod-info" (OuterVolumeSpecName: "pod-info") pod "f65583d1-046b-463a-9101-2074072a94f0" (UID: "f65583d1-046b-463a-9101-2074072a94f0"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.071692 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f65583d1-046b-463a-9101-2074072a94f0-config-data" (OuterVolumeSpecName: "config-data") pod "f65583d1-046b-463a-9101-2074072a94f0" (UID: "f65583d1-046b-463a-9101-2074072a94f0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.071816 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b" (OuterVolumeSpecName: "persistence") pod "f65583d1-046b-463a-9101-2074072a94f0" (UID: "f65583d1-046b-463a-9101-2074072a94f0"). InnerVolumeSpecName "pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.101219 4946 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f65583d1-046b-463a-9101-2074072a94f0-pod-info\") on node \"crc\" DevicePath \"\""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.101692 4946 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f65583d1-046b-463a-9101-2074072a94f0-plugins-conf\") on node \"crc\" DevicePath \"\""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.101709 4946 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.101723 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxdz4\" (UniqueName: \"kubernetes.io/projected/f65583d1-046b-463a-9101-2074072a94f0-kube-api-access-kxdz4\") on node \"crc\" DevicePath \"\""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.101758 4946 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\") on node \"crc\" "
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.101771 4946 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.101787 4946 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.101798 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f65583d1-046b-463a-9101-2074072a94f0-config-data\") on node \"crc\" DevicePath \"\""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.101808 4946 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f65583d1-046b-463a-9101-2074072a94f0-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.102426 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f65583d1-046b-463a-9101-2074072a94f0-server-conf" (OuterVolumeSpecName: "server-conf") pod "f65583d1-046b-463a-9101-2074072a94f0" (UID: "f65583d1-046b-463a-9101-2074072a94f0"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.139170 4946 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.139355 4946 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b") on node "crc"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.178399 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "f65583d1-046b-463a-9101-2074072a94f0" (UID: "f65583d1-046b-463a-9101-2074072a94f0"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.203974 4946 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f65583d1-046b-463a-9101-2074072a94f0-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.204025 4946 reconciler_common.go:293] "Volume detached for volume \"pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\") on node \"crc\" DevicePath \"\""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.204043 4946 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f65583d1-046b-463a-9101-2074072a94f0-server-conf\") on node \"crc\" DevicePath \"\""
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.300610 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.324600 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.337961 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 04 15:29:24 crc kubenswrapper[4946]: E1204 15:29:24.338501 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f65583d1-046b-463a-9101-2074072a94f0" containerName="setup-container"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.338517 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f65583d1-046b-463a-9101-2074072a94f0" containerName="setup-container"
Dec 04 15:29:24 crc kubenswrapper[4946]: E1204 15:29:24.338536 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f65583d1-046b-463a-9101-2074072a94f0" containerName="rabbitmq"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.338542 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f65583d1-046b-463a-9101-2074072a94f0" containerName="rabbitmq"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.338751 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f65583d1-046b-463a-9101-2074072a94f0" containerName="rabbitmq"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.340015 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.342606 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.342860 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.344799 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.344917 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.344974 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.345053 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.346974 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-dn75w"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.354094 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.407835 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6458626f-136f-475a-b7ad-cf32977e39eb-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.407895 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6458626f-136f-475a-b7ad-cf32977e39eb-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.407924 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg55g\" (UniqueName: \"kubernetes.io/projected/6458626f-136f-475a-b7ad-cf32977e39eb-kube-api-access-tg55g\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.408067 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6458626f-136f-475a-b7ad-cf32977e39eb-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.408132 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6458626f-136f-475a-b7ad-cf32977e39eb-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.408208 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6458626f-136f-475a-b7ad-cf32977e39eb-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.408227 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.408305 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6458626f-136f-475a-b7ad-cf32977e39eb-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.408445 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6458626f-136f-475a-b7ad-cf32977e39eb-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.408512 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6458626f-136f-475a-b7ad-cf32977e39eb-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.408596 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6458626f-136f-475a-b7ad-cf32977e39eb-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.510615 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6458626f-136f-475a-b7ad-cf32977e39eb-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.510686 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6458626f-136f-475a-b7ad-cf32977e39eb-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.510720 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg55g\" (UniqueName: \"kubernetes.io/projected/6458626f-136f-475a-b7ad-cf32977e39eb-kube-api-access-tg55g\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.510775 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6458626f-136f-475a-b7ad-cf32977e39eb-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.510791 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6458626f-136f-475a-b7ad-cf32977e39eb-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.510841 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6458626f-136f-475a-b7ad-cf32977e39eb-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.510860 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.510902 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6458626f-136f-475a-b7ad-cf32977e39eb-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.510931 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6458626f-136f-475a-b7ad-cf32977e39eb-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.510959 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6458626f-136f-475a-b7ad-cf32977e39eb-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.511002 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6458626f-136f-475a-b7ad-cf32977e39eb-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.512274 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6458626f-136f-475a-b7ad-cf32977e39eb-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.513346 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6458626f-136f-475a-b7ad-cf32977e39eb-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.514569 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6458626f-136f-475a-b7ad-cf32977e39eb-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.514575 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6458626f-136f-475a-b7ad-cf32977e39eb-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.515298 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6458626f-136f-475a-b7ad-cf32977e39eb-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.516720 4946 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.516762 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6ef0e6db955580b30200c839876ec91ed17a86e82fc3a8ead692ab0769c689a7/globalmount\"" pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.518867 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6458626f-136f-475a-b7ad-cf32977e39eb-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.519805 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6458626f-136f-475a-b7ad-cf32977e39eb-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.521795 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6458626f-136f-475a-b7ad-cf32977e39eb-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.522300 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6458626f-136f-475a-b7ad-cf32977e39eb-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.538345 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg55g\" (UniqueName: \"kubernetes.io/projected/6458626f-136f-475a-b7ad-cf32977e39eb-kube-api-access-tg55g\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.565911 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4d267dc8-65fe-4102-9721-c18ec24cdc4b\") pod \"rabbitmq-cell1-server-0\" (UID: \"6458626f-136f-475a-b7ad-cf32977e39eb\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: E1204 15:29:24.662665 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested"
Dec 04 15:29:24 crc kubenswrapper[4946]: E1204 15:29:24.663105 4946 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested"
Dec 04 15:29:24 crc kubenswrapper[4946]: E1204 15:29:24.663341 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndbh66fh695h585hdbhchf5hcfh666h5cfh5fch9dh5c8h9dh5b9h549h65bh668h74h5hdhd6h658hbch656h559h8fhddh54fh57dh88h58fq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nc67b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(655a0ab4-533d-4447-8656-72742f94f4a7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.668909 4946 scope.go:117] "RemoveContainer" containerID="743ddc87efc06da47d02b8cad5e61ba2b5c71fb18761848f5cb719610cc67896"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.672062 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.731555 4946 scope.go:117] "RemoveContainer" containerID="793aaf1a1a7437e7e5a4a080f87f02b82e6a64f68c3b9c9cfcb825f04c561997"
Dec 04 15:29:24 crc kubenswrapper[4946]: I1204 15:29:24.793774 4946 scope.go:117] "RemoveContainer" containerID="4af82c0e7141a08c616b14294ff018b51e2011189f2e35b47b77c81800165efa"
Dec 04 15:29:25 crc kubenswrapper[4946]: I1204 15:29:25.232850 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 04 15:29:25 crc kubenswrapper[4946]: I1204 15:29:25.336665 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-sl69k"]
Dec 04 15:29:25 crc kubenswrapper[4946]: I1204 15:29:25.486002 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f65583d1-046b-463a-9101-2074072a94f0" path="/var/lib/kubelet/pods/f65583d1-046b-463a-9101-2074072a94f0/volumes"
Dec 04 15:29:25 crc kubenswrapper[4946]: I1204 15:29:25.497039 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 04 15:29:25 crc kubenswrapper[4946]: I1204 15:29:25.989683 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"655a0ab4-533d-4447-8656-72742f94f4a7","Type":"ContainerStarted","Data":"01824d9faa3f7784a0045d9c32eed0060bc11719df5442117cfa5fded977b472"}
Dec 04 15:29:25 crc kubenswrapper[4946]: I1204 15:29:25.991707 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f75f35c6-b58d-471d-9b5e-2d402f3ce92f","Type":"ContainerStarted","Data":"642d65518ce1dfd3a5881a2431261825dcb02c6a6e18d4aec935c072287ff0e7"}
Dec 04 15:29:25 crc kubenswrapper[4946]: I1204 15:29:25.995655 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vb4zl" event={"ID":"f7735402-ad72-4b1b-8028-f0a78d9bff4d","Type":"ContainerStarted","Data":"c8e8d9ccf034db70362c4859e99ce1907c66b1ec5985dac0632dbe5a6a977f24"}
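The ceilometer-central-agent spec dumped above carries an exec liveness probe (/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py) with InitialDelaySeconds:300, TimeoutSeconds:5, PeriodSeconds:5, SuccessThreshold:1, FailureThreshold:3. The sketch below illustrates only the threshold semantics kubelet applies to such a probe (consecutive failures up to FailureThreshold before the container is declared unhealthy); it is an illustration, not kubelet's prober worker.

// probethreshold.go: FailureThreshold consecutive failures mark unhealthy.
package main

import "fmt"

type probePolicy struct {
	failureThreshold int // e.g. 3, as in the spec above
}

// evaluate feeds a sequence of probe results through the threshold logic;
// any success resets the consecutive-failure counter.
func evaluate(p probePolicy, results []bool) string {
	consecutiveFailures := 0
	for _, ok := range results {
		if ok {
			consecutiveFailures = 0
			continue
		}
		consecutiveFailures++
		if consecutiveFailures >= p.failureThreshold {
			return "unhealthy: restart container"
		}
	}
	return "healthy"
}

func main() {
	p := probePolicy{failureThreshold: 3}
	fmt.Println(evaluate(p, []bool{true, false, false, true, false})) // healthy
	fmt.Println(evaluate(p, []bool{false, false, false}))             // unhealthy
}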
event={"ID":"6458626f-136f-475a-b7ad-cf32977e39eb","Type":"ContainerStarted","Data":"11563abde63e3bba58ce6ecbcf8e85b19367dc32909025e93639929c1adbefc9"} Dec 04 15:29:26 crc kubenswrapper[4946]: I1204 15:29:26.000656 4946 generic.go:334] "Generic (PLEG): container finished" podID="f215d2a4-1108-4c98-a338-9bd0e5fe26ad" containerID="ce43636079c283cccba3197818f037a57305d9759c05ee800ce8c2890c411b50" exitCode=0 Dec 04 15:29:26 crc kubenswrapper[4946]: I1204 15:29:26.000701 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" event={"ID":"f215d2a4-1108-4c98-a338-9bd0e5fe26ad","Type":"ContainerDied","Data":"ce43636079c283cccba3197818f037a57305d9759c05ee800ce8c2890c411b50"} Dec 04 15:29:26 crc kubenswrapper[4946]: I1204 15:29:26.000728 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" event={"ID":"f215d2a4-1108-4c98-a338-9bd0e5fe26ad","Type":"ContainerStarted","Data":"dc7e212e1104d5d14f1f4c98af0b7ecf4ec3488ad1f35247cadc677f1a3b4fb0"} Dec 04 15:29:27 crc kubenswrapper[4946]: I1204 15:29:27.011329 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" event={"ID":"f215d2a4-1108-4c98-a338-9bd0e5fe26ad","Type":"ContainerStarted","Data":"de4abe802e2988555ad5857f16a2c250d968aa8209ab09f1df77d293f0ac18f0"} Dec 04 15:29:27 crc kubenswrapper[4946]: I1204 15:29:27.011911 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:27 crc kubenswrapper[4946]: I1204 15:29:27.013858 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"655a0ab4-533d-4447-8656-72742f94f4a7","Type":"ContainerStarted","Data":"9cb31deb8b74ad1b4437582ca63f71d165751337a9b2419ca211f9b14f2650d5"} Dec 04 15:29:27 crc kubenswrapper[4946]: I1204 15:29:27.015326 4946 generic.go:334] "Generic (PLEG): container finished" podID="f7735402-ad72-4b1b-8028-f0a78d9bff4d" containerID="c8e8d9ccf034db70362c4859e99ce1907c66b1ec5985dac0632dbe5a6a977f24" exitCode=0 Dec 04 15:29:27 crc kubenswrapper[4946]: I1204 15:29:27.015355 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vb4zl" event={"ID":"f7735402-ad72-4b1b-8028-f0a78d9bff4d","Type":"ContainerDied","Data":"c8e8d9ccf034db70362c4859e99ce1907c66b1ec5985dac0632dbe5a6a977f24"} Dec 04 15:29:27 crc kubenswrapper[4946]: I1204 15:29:27.042209 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" podStartSLOduration=15.042184707 podStartE2EDuration="15.042184707s" podCreationTimestamp="2025-12-04 15:29:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:29:27.031308382 +0000 UTC m=+1617.917352033" watchObservedRunningTime="2025-12-04 15:29:27.042184707 +0000 UTC m=+1617.928228348" Dec 04 15:29:27 crc kubenswrapper[4946]: E1204 15:29:27.836539 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="655a0ab4-533d-4447-8656-72742f94f4a7" Dec 04 15:29:28 crc kubenswrapper[4946]: I1204 15:29:28.030187 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"655a0ab4-533d-4447-8656-72742f94f4a7","Type":"ContainerStarted","Data":"9a4685f50294a03b94859cc6e7ef6950b5a06a6b5f3ef29c599101b0789e244a"} Dec 04 15:29:28 crc kubenswrapper[4946]: I1204 15:29:28.030258 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 04 15:29:28 crc kubenswrapper[4946]: E1204 15:29:28.031732 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested\\\"\"" pod="openstack/ceilometer-0" podUID="655a0ab4-533d-4447-8656-72742f94f4a7" Dec 04 15:29:28 crc kubenswrapper[4946]: I1204 15:29:28.031748 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f75f35c6-b58d-471d-9b5e-2d402f3ce92f","Type":"ContainerStarted","Data":"40a5c389af2cb85345cab40bb56878518267cad4f47816c51a510216ced5da73"} Dec 04 15:29:28 crc kubenswrapper[4946]: I1204 15:29:28.035894 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vb4zl" event={"ID":"f7735402-ad72-4b1b-8028-f0a78d9bff4d","Type":"ContainerStarted","Data":"0e2c8f3217037637cb2ffaea6c7d3aa2364dcdd26eba7f44e1f092b50fe62990"} Dec 04 15:29:28 crc kubenswrapper[4946]: I1204 15:29:28.038766 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6458626f-136f-475a-b7ad-cf32977e39eb","Type":"ContainerStarted","Data":"1e70b385ff0a918772b2b50fd6529ddc346ee88fc069d6d6db09d8bb2710a4f1"} Dec 04 15:29:28 crc kubenswrapper[4946]: I1204 15:29:28.077228 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vb4zl" podStartSLOduration=2.837735587 podStartE2EDuration="26.077212s" podCreationTimestamp="2025-12-04 15:29:02 +0000 UTC" firstStartedPulling="2025-12-04 15:29:04.236879376 +0000 UTC m=+1595.122923017" lastFinishedPulling="2025-12-04 15:29:27.476355799 +0000 UTC m=+1618.362399430" observedRunningTime="2025-12-04 15:29:28.074763994 +0000 UTC m=+1618.960807625" watchObservedRunningTime="2025-12-04 15:29:28.077212 +0000 UTC m=+1618.963255641" Dec 04 15:29:29 crc kubenswrapper[4946]: E1204 15:29:29.051565 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested\\\"\"" pod="openstack/ceilometer-0" podUID="655a0ab4-533d-4447-8656-72742f94f4a7" Dec 04 15:29:32 crc kubenswrapper[4946]: I1204 15:29:32.879390 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:32 crc kubenswrapper[4946]: I1204 15:29:32.970962 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-89fh5"] Dec 04 15:29:32 crc kubenswrapper[4946]: I1204 15:29:32.971415 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" podUID="f6f0046a-c978-4cf7-8199-4617162c1d5f" containerName="dnsmasq-dns" containerID="cri-o://148d20654423a284dd08aea4bd2f0d69aa45d5be8719837fcb52476bd4464a22" gracePeriod=10 Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.183277 4946 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/dnsmasq-dns-85f64749dc-vglk4"] Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.185462 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.198691 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85f64749dc-vglk4"] Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.206500 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vb4zl" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.207614 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vb4zl" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.276574 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vb4zl" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.339635 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbjdc\" (UniqueName: \"kubernetes.io/projected/d984a81d-2489-42fa-b527-8962119b7dc5-kube-api-access-nbjdc\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.339697 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d984a81d-2489-42fa-b527-8962119b7dc5-ovsdbserver-sb\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.339765 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d984a81d-2489-42fa-b527-8962119b7dc5-config\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.339804 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d984a81d-2489-42fa-b527-8962119b7dc5-dns-svc\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.339825 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d984a81d-2489-42fa-b527-8962119b7dc5-dns-swift-storage-0\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.340111 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d984a81d-2489-42fa-b527-8962119b7dc5-openstack-edpm-ipam\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.340443 4946 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d984a81d-2489-42fa-b527-8962119b7dc5-ovsdbserver-nb\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.442306 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d984a81d-2489-42fa-b527-8962119b7dc5-ovsdbserver-nb\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.442401 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbjdc\" (UniqueName: \"kubernetes.io/projected/d984a81d-2489-42fa-b527-8962119b7dc5-kube-api-access-nbjdc\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.442425 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d984a81d-2489-42fa-b527-8962119b7dc5-ovsdbserver-sb\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.442483 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d984a81d-2489-42fa-b527-8962119b7dc5-config\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.442513 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d984a81d-2489-42fa-b527-8962119b7dc5-dns-svc\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.442537 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d984a81d-2489-42fa-b527-8962119b7dc5-dns-swift-storage-0\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.442598 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d984a81d-2489-42fa-b527-8962119b7dc5-openstack-edpm-ipam\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.443572 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d984a81d-2489-42fa-b527-8962119b7dc5-openstack-edpm-ipam\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.443616 4946 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d984a81d-2489-42fa-b527-8962119b7dc5-ovsdbserver-sb\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.444209 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d984a81d-2489-42fa-b527-8962119b7dc5-config\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.444803 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d984a81d-2489-42fa-b527-8962119b7dc5-dns-svc\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.444958 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d984a81d-2489-42fa-b527-8962119b7dc5-ovsdbserver-nb\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.445341 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d984a81d-2489-42fa-b527-8962119b7dc5-dns-swift-storage-0\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.524891 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbjdc\" (UniqueName: \"kubernetes.io/projected/d984a81d-2489-42fa-b527-8962119b7dc5-kube-api-access-nbjdc\") pod \"dnsmasq-dns-85f64749dc-vglk4\" (UID: \"d984a81d-2489-42fa-b527-8962119b7dc5\") " pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.743021 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.806476 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.854232 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-dns-svc\") pod \"f6f0046a-c978-4cf7-8199-4617162c1d5f\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.854410 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-ovsdbserver-nb\") pod \"f6f0046a-c978-4cf7-8199-4617162c1d5f\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.854559 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-config\") pod \"f6f0046a-c978-4cf7-8199-4617162c1d5f\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.854682 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-ovsdbserver-sb\") pod \"f6f0046a-c978-4cf7-8199-4617162c1d5f\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.854735 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8f59l\" (UniqueName: \"kubernetes.io/projected/f6f0046a-c978-4cf7-8199-4617162c1d5f-kube-api-access-8f59l\") pod \"f6f0046a-c978-4cf7-8199-4617162c1d5f\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.855008 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-dns-swift-storage-0\") pod \"f6f0046a-c978-4cf7-8199-4617162c1d5f\" (UID: \"f6f0046a-c978-4cf7-8199-4617162c1d5f\") " Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.876248 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6f0046a-c978-4cf7-8199-4617162c1d5f-kube-api-access-8f59l" (OuterVolumeSpecName: "kube-api-access-8f59l") pod "f6f0046a-c978-4cf7-8199-4617162c1d5f" (UID: "f6f0046a-c978-4cf7-8199-4617162c1d5f"). InnerVolumeSpecName "kube-api-access-8f59l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.964372 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8f59l\" (UniqueName: \"kubernetes.io/projected/f6f0046a-c978-4cf7-8199-4617162c1d5f-kube-api-access-8f59l\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.971592 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f6f0046a-c978-4cf7-8199-4617162c1d5f" (UID: "f6f0046a-c978-4cf7-8199-4617162c1d5f"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.983387 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f6f0046a-c978-4cf7-8199-4617162c1d5f" (UID: "f6f0046a-c978-4cf7-8199-4617162c1d5f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:29:33 crc kubenswrapper[4946]: I1204 15:29:33.984566 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-config" (OuterVolumeSpecName: "config") pod "f6f0046a-c978-4cf7-8199-4617162c1d5f" (UID: "f6f0046a-c978-4cf7-8199-4617162c1d5f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.019662 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f6f0046a-c978-4cf7-8199-4617162c1d5f" (UID: "f6f0046a-c978-4cf7-8199-4617162c1d5f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.029682 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f6f0046a-c978-4cf7-8199-4617162c1d5f" (UID: "f6f0046a-c978-4cf7-8199-4617162c1d5f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.068021 4946 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.068083 4946 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.068093 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.068104 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.068132 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6f0046a-c978-4cf7-8199-4617162c1d5f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.115664 4946 generic.go:334] "Generic (PLEG): container finished" podID="f6f0046a-c978-4cf7-8199-4617162c1d5f" containerID="148d20654423a284dd08aea4bd2f0d69aa45d5be8719837fcb52476bd4464a22" exitCode=0 Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.116745 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.117779 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" event={"ID":"f6f0046a-c978-4cf7-8199-4617162c1d5f","Type":"ContainerDied","Data":"148d20654423a284dd08aea4bd2f0d69aa45d5be8719837fcb52476bd4464a22"} Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.117812 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" event={"ID":"f6f0046a-c978-4cf7-8199-4617162c1d5f","Type":"ContainerDied","Data":"fcd5de62096705b2470faf1b89261c4a70dbe7084b1b2c090bfc560a9be9bc95"} Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.117834 4946 scope.go:117] "RemoveContainer" containerID="148d20654423a284dd08aea4bd2f0d69aa45d5be8719837fcb52476bd4464a22" Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.201390 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vb4zl" Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.216195 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-89fh5"] Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.228553 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-89fh5"] Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.257458 4946 scope.go:117] "RemoveContainer" containerID="c2e050cd5c56f1e69d3c11e1738360324a7480a825b297ffdba837d12c27cd76" Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.258236 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vb4zl"] Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.282556 4946 scope.go:117] "RemoveContainer" containerID="148d20654423a284dd08aea4bd2f0d69aa45d5be8719837fcb52476bd4464a22" Dec 04 15:29:34 crc kubenswrapper[4946]: E1204 15:29:34.284594 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"148d20654423a284dd08aea4bd2f0d69aa45d5be8719837fcb52476bd4464a22\": container with ID starting with 148d20654423a284dd08aea4bd2f0d69aa45d5be8719837fcb52476bd4464a22 not found: ID does not exist" containerID="148d20654423a284dd08aea4bd2f0d69aa45d5be8719837fcb52476bd4464a22" Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.284652 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"148d20654423a284dd08aea4bd2f0d69aa45d5be8719837fcb52476bd4464a22"} err="failed to get container status \"148d20654423a284dd08aea4bd2f0d69aa45d5be8719837fcb52476bd4464a22\": rpc error: code = NotFound desc = could not find container \"148d20654423a284dd08aea4bd2f0d69aa45d5be8719837fcb52476bd4464a22\": container with ID starting with 148d20654423a284dd08aea4bd2f0d69aa45d5be8719837fcb52476bd4464a22 not found: ID does not exist" Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.284687 4946 scope.go:117] "RemoveContainer" containerID="c2e050cd5c56f1e69d3c11e1738360324a7480a825b297ffdba837d12c27cd76" Dec 04 15:29:34 crc kubenswrapper[4946]: E1204 15:29:34.285053 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2e050cd5c56f1e69d3c11e1738360324a7480a825b297ffdba837d12c27cd76\": container with ID starting with c2e050cd5c56f1e69d3c11e1738360324a7480a825b297ffdba837d12c27cd76 not found: ID does not exist" 
containerID="c2e050cd5c56f1e69d3c11e1738360324a7480a825b297ffdba837d12c27cd76" Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.285096 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2e050cd5c56f1e69d3c11e1738360324a7480a825b297ffdba837d12c27cd76"} err="failed to get container status \"c2e050cd5c56f1e69d3c11e1738360324a7480a825b297ffdba837d12c27cd76\": rpc error: code = NotFound desc = could not find container \"c2e050cd5c56f1e69d3c11e1738360324a7480a825b297ffdba837d12c27cd76\": container with ID starting with c2e050cd5c56f1e69d3c11e1738360324a7480a825b297ffdba837d12c27cd76 not found: ID does not exist" Dec 04 15:29:34 crc kubenswrapper[4946]: I1204 15:29:34.433200 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85f64749dc-vglk4"] Dec 04 15:29:35 crc kubenswrapper[4946]: I1204 15:29:35.129956 4946 generic.go:334] "Generic (PLEG): container finished" podID="d984a81d-2489-42fa-b527-8962119b7dc5" containerID="f58db264b112d2276b2aeefa89df06f750ee94b7ee60db1e8fbdd404eae9a77e" exitCode=0 Dec 04 15:29:35 crc kubenswrapper[4946]: I1204 15:29:35.130038 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85f64749dc-vglk4" event={"ID":"d984a81d-2489-42fa-b527-8962119b7dc5","Type":"ContainerDied","Data":"f58db264b112d2276b2aeefa89df06f750ee94b7ee60db1e8fbdd404eae9a77e"} Dec 04 15:29:35 crc kubenswrapper[4946]: I1204 15:29:35.131505 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85f64749dc-vglk4" event={"ID":"d984a81d-2489-42fa-b527-8962119b7dc5","Type":"ContainerStarted","Data":"8749f0765b9b913756b7096b02651bfa9a00ddd321d1244846ee7512ea88f2f4"} Dec 04 15:29:35 crc kubenswrapper[4946]: I1204 15:29:35.464999 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6f0046a-c978-4cf7-8199-4617162c1d5f" path="/var/lib/kubelet/pods/f6f0046a-c978-4cf7-8199-4617162c1d5f/volumes" Dec 04 15:29:36 crc kubenswrapper[4946]: I1204 15:29:36.148271 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85f64749dc-vglk4" event={"ID":"d984a81d-2489-42fa-b527-8962119b7dc5","Type":"ContainerStarted","Data":"f186032388dcacc1d34ecca8529e8a93ac5d520ed5a10fb40345924bc60cde9b"} Dec 04 15:29:36 crc kubenswrapper[4946]: I1204 15:29:36.148396 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vb4zl" podUID="f7735402-ad72-4b1b-8028-f0a78d9bff4d" containerName="registry-server" containerID="cri-o://0e2c8f3217037637cb2ffaea6c7d3aa2364dcdd26eba7f44e1f092b50fe62990" gracePeriod=2 Dec 04 15:29:36 crc kubenswrapper[4946]: I1204 15:29:36.184836 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-85f64749dc-vglk4" podStartSLOduration=3.184814244 podStartE2EDuration="3.184814244s" podCreationTimestamp="2025-12-04 15:29:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:29:36.170151037 +0000 UTC m=+1627.056194688" watchObservedRunningTime="2025-12-04 15:29:36.184814244 +0000 UTC m=+1627.070857895" Dec 04 15:29:36 crc kubenswrapper[4946]: I1204 15:29:36.799144 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vb4zl" Dec 04 15:29:36 crc kubenswrapper[4946]: I1204 15:29:36.957435 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wg65m\" (UniqueName: \"kubernetes.io/projected/f7735402-ad72-4b1b-8028-f0a78d9bff4d-kube-api-access-wg65m\") pod \"f7735402-ad72-4b1b-8028-f0a78d9bff4d\" (UID: \"f7735402-ad72-4b1b-8028-f0a78d9bff4d\") " Dec 04 15:29:36 crc kubenswrapper[4946]: I1204 15:29:36.957519 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7735402-ad72-4b1b-8028-f0a78d9bff4d-utilities\") pod \"f7735402-ad72-4b1b-8028-f0a78d9bff4d\" (UID: \"f7735402-ad72-4b1b-8028-f0a78d9bff4d\") " Dec 04 15:29:36 crc kubenswrapper[4946]: I1204 15:29:36.957618 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7735402-ad72-4b1b-8028-f0a78d9bff4d-catalog-content\") pod \"f7735402-ad72-4b1b-8028-f0a78d9bff4d\" (UID: \"f7735402-ad72-4b1b-8028-f0a78d9bff4d\") " Dec 04 15:29:36 crc kubenswrapper[4946]: I1204 15:29:36.958564 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7735402-ad72-4b1b-8028-f0a78d9bff4d-utilities" (OuterVolumeSpecName: "utilities") pod "f7735402-ad72-4b1b-8028-f0a78d9bff4d" (UID: "f7735402-ad72-4b1b-8028-f0a78d9bff4d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:29:36 crc kubenswrapper[4946]: I1204 15:29:36.964521 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7735402-ad72-4b1b-8028-f0a78d9bff4d-kube-api-access-wg65m" (OuterVolumeSpecName: "kube-api-access-wg65m") pod "f7735402-ad72-4b1b-8028-f0a78d9bff4d" (UID: "f7735402-ad72-4b1b-8028-f0a78d9bff4d"). InnerVolumeSpecName "kube-api-access-wg65m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.006673 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7735402-ad72-4b1b-8028-f0a78d9bff4d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f7735402-ad72-4b1b-8028-f0a78d9bff4d" (UID: "f7735402-ad72-4b1b-8028-f0a78d9bff4d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.060658 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7735402-ad72-4b1b-8028-f0a78d9bff4d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.060700 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wg65m\" (UniqueName: \"kubernetes.io/projected/f7735402-ad72-4b1b-8028-f0a78d9bff4d-kube-api-access-wg65m\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.060714 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7735402-ad72-4b1b-8028-f0a78d9bff4d-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.164605 4946 generic.go:334] "Generic (PLEG): container finished" podID="f7735402-ad72-4b1b-8028-f0a78d9bff4d" containerID="0e2c8f3217037637cb2ffaea6c7d3aa2364dcdd26eba7f44e1f092b50fe62990" exitCode=0 Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.165289 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vb4zl" Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.165306 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vb4zl" event={"ID":"f7735402-ad72-4b1b-8028-f0a78d9bff4d","Type":"ContainerDied","Data":"0e2c8f3217037637cb2ffaea6c7d3aa2364dcdd26eba7f44e1f092b50fe62990"} Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.165506 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vb4zl" event={"ID":"f7735402-ad72-4b1b-8028-f0a78d9bff4d","Type":"ContainerDied","Data":"a43f299d03b22e74aaa4ff30c1eda2b57bb2ae5542ea122f53aa2c90fee484e8"} Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.165559 4946 scope.go:117] "RemoveContainer" containerID="0e2c8f3217037637cb2ffaea6c7d3aa2364dcdd26eba7f44e1f092b50fe62990" Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.165858 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.219366 4946 scope.go:117] "RemoveContainer" containerID="c8e8d9ccf034db70362c4859e99ce1907c66b1ec5985dac0632dbe5a6a977f24" Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.227826 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vb4zl"] Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.250411 4946 scope.go:117] "RemoveContainer" containerID="9a60eee3f3ab381ab2617864f2854400c38a784e9c2761ba66c69fc06157363a" Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.250595 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vb4zl"] Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.298206 4946 scope.go:117] "RemoveContainer" containerID="0e2c8f3217037637cb2ffaea6c7d3aa2364dcdd26eba7f44e1f092b50fe62990" Dec 04 15:29:37 crc kubenswrapper[4946]: E1204 15:29:37.298675 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e2c8f3217037637cb2ffaea6c7d3aa2364dcdd26eba7f44e1f092b50fe62990\": container with ID starting with 
0e2c8f3217037637cb2ffaea6c7d3aa2364dcdd26eba7f44e1f092b50fe62990 not found: ID does not exist" containerID="0e2c8f3217037637cb2ffaea6c7d3aa2364dcdd26eba7f44e1f092b50fe62990" Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.298714 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e2c8f3217037637cb2ffaea6c7d3aa2364dcdd26eba7f44e1f092b50fe62990"} err="failed to get container status \"0e2c8f3217037637cb2ffaea6c7d3aa2364dcdd26eba7f44e1f092b50fe62990\": rpc error: code = NotFound desc = could not find container \"0e2c8f3217037637cb2ffaea6c7d3aa2364dcdd26eba7f44e1f092b50fe62990\": container with ID starting with 0e2c8f3217037637cb2ffaea6c7d3aa2364dcdd26eba7f44e1f092b50fe62990 not found: ID does not exist" Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.298741 4946 scope.go:117] "RemoveContainer" containerID="c8e8d9ccf034db70362c4859e99ce1907c66b1ec5985dac0632dbe5a6a977f24" Dec 04 15:29:37 crc kubenswrapper[4946]: E1204 15:29:37.299010 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8e8d9ccf034db70362c4859e99ce1907c66b1ec5985dac0632dbe5a6a977f24\": container with ID starting with c8e8d9ccf034db70362c4859e99ce1907c66b1ec5985dac0632dbe5a6a977f24 not found: ID does not exist" containerID="c8e8d9ccf034db70362c4859e99ce1907c66b1ec5985dac0632dbe5a6a977f24" Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.299032 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8e8d9ccf034db70362c4859e99ce1907c66b1ec5985dac0632dbe5a6a977f24"} err="failed to get container status \"c8e8d9ccf034db70362c4859e99ce1907c66b1ec5985dac0632dbe5a6a977f24\": rpc error: code = NotFound desc = could not find container \"c8e8d9ccf034db70362c4859e99ce1907c66b1ec5985dac0632dbe5a6a977f24\": container with ID starting with c8e8d9ccf034db70362c4859e99ce1907c66b1ec5985dac0632dbe5a6a977f24 not found: ID does not exist" Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.299047 4946 scope.go:117] "RemoveContainer" containerID="9a60eee3f3ab381ab2617864f2854400c38a784e9c2761ba66c69fc06157363a" Dec 04 15:29:37 crc kubenswrapper[4946]: E1204 15:29:37.299306 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a60eee3f3ab381ab2617864f2854400c38a784e9c2761ba66c69fc06157363a\": container with ID starting with 9a60eee3f3ab381ab2617864f2854400c38a784e9c2761ba66c69fc06157363a not found: ID does not exist" containerID="9a60eee3f3ab381ab2617864f2854400c38a784e9c2761ba66c69fc06157363a" Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.299331 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a60eee3f3ab381ab2617864f2854400c38a784e9c2761ba66c69fc06157363a"} err="failed to get container status \"9a60eee3f3ab381ab2617864f2854400c38a784e9c2761ba66c69fc06157363a\": rpc error: code = NotFound desc = could not find container \"9a60eee3f3ab381ab2617864f2854400c38a784e9c2761ba66c69fc06157363a\": container with ID starting with 9a60eee3f3ab381ab2617864f2854400c38a784e9c2761ba66c69fc06157363a not found: ID does not exist" Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.475515 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7735402-ad72-4b1b-8028-f0a78d9bff4d" path="/var/lib/kubelet/pods/f7735402-ad72-4b1b-8028-f0a78d9bff4d/volumes" Dec 04 15:29:37 crc kubenswrapper[4946]: I1204 15:29:37.651791 
4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 04 15:29:38 crc kubenswrapper[4946]: I1204 15:29:38.180177 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-jkwmj" event={"ID":"95ee854c-ccd7-4292-b874-9ce160fc8988","Type":"ContainerStarted","Data":"02920522b5698fb370a57eb9b29a1b82f48322d4c0d4df021001f2a9b13db7a9"} Dec 04 15:29:38 crc kubenswrapper[4946]: I1204 15:29:38.208444 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-db-sync-jkwmj" podStartSLOduration=1.893518264 podStartE2EDuration="43.208424512s" podCreationTimestamp="2025-12-04 15:28:55 +0000 UTC" firstStartedPulling="2025-12-04 15:28:56.334109296 +0000 UTC m=+1587.220152957" lastFinishedPulling="2025-12-04 15:29:37.649015564 +0000 UTC m=+1628.535059205" observedRunningTime="2025-12-04 15:29:38.19988852 +0000 UTC m=+1629.085932161" watchObservedRunningTime="2025-12-04 15:29:38.208424512 +0000 UTC m=+1629.094468153" Dec 04 15:29:38 crc kubenswrapper[4946]: I1204 15:29:38.405777 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5fd9b586ff-89fh5" podUID="f6f0046a-c978-4cf7-8199-4617162c1d5f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.224:5353: i/o timeout" Dec 04 15:29:39 crc kubenswrapper[4946]: I1204 15:29:39.482559 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 04 15:29:41 crc kubenswrapper[4946]: I1204 15:29:41.213903 4946 generic.go:334] "Generic (PLEG): container finished" podID="95ee854c-ccd7-4292-b874-9ce160fc8988" containerID="02920522b5698fb370a57eb9b29a1b82f48322d4c0d4df021001f2a9b13db7a9" exitCode=0 Dec 04 15:29:41 crc kubenswrapper[4946]: I1204 15:29:41.214754 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-jkwmj" event={"ID":"95ee854c-ccd7-4292-b874-9ce160fc8988","Type":"ContainerDied","Data":"02920522b5698fb370a57eb9b29a1b82f48322d4c0d4df021001f2a9b13db7a9"} Dec 04 15:29:41 crc kubenswrapper[4946]: I1204 15:29:41.219574 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"655a0ab4-533d-4447-8656-72742f94f4a7","Type":"ContainerStarted","Data":"aa86838122b5b40db992d2775a27bfdd8738b3ebf3e986735789ab44725e7f4d"} Dec 04 15:29:41 crc kubenswrapper[4946]: I1204 15:29:41.249838 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.392583617 podStartE2EDuration="42.249818554s" podCreationTimestamp="2025-12-04 15:28:59 +0000 UTC" firstStartedPulling="2025-12-04 15:29:00.514996103 +0000 UTC m=+1591.401039744" lastFinishedPulling="2025-12-04 15:29:40.37223104 +0000 UTC m=+1631.258274681" observedRunningTime="2025-12-04 15:29:41.248519529 +0000 UTC m=+1632.134563170" watchObservedRunningTime="2025-12-04 15:29:41.249818554 +0000 UTC m=+1632.135862195" Dec 04 15:29:41 crc kubenswrapper[4946]: I1204 15:29:41.893995 4946 scope.go:117] "RemoveContainer" containerID="0989b2d5faf284dfd2ca0a025fb876dc5ed596110fd952874fcc8ba8bb3cbca0" Dec 04 15:29:42 crc kubenswrapper[4946]: I1204 15:29:42.746266 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:29:42 crc kubenswrapper[4946]: I1204 15:29:42.768389 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95ee854c-ccd7-4292-b874-9ce160fc8988-scripts\") pod \"95ee854c-ccd7-4292-b874-9ce160fc8988\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " Dec 04 15:29:42 crc kubenswrapper[4946]: I1204 15:29:42.768521 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8cd4\" (UniqueName: \"kubernetes.io/projected/95ee854c-ccd7-4292-b874-9ce160fc8988-kube-api-access-l8cd4\") pod \"95ee854c-ccd7-4292-b874-9ce160fc8988\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " Dec 04 15:29:42 crc kubenswrapper[4946]: I1204 15:29:42.768577 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95ee854c-ccd7-4292-b874-9ce160fc8988-combined-ca-bundle\") pod \"95ee854c-ccd7-4292-b874-9ce160fc8988\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " Dec 04 15:29:42 crc kubenswrapper[4946]: I1204 15:29:42.768661 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/95ee854c-ccd7-4292-b874-9ce160fc8988-certs\") pod \"95ee854c-ccd7-4292-b874-9ce160fc8988\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " Dec 04 15:29:42 crc kubenswrapper[4946]: I1204 15:29:42.769205 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95ee854c-ccd7-4292-b874-9ce160fc8988-config-data\") pod \"95ee854c-ccd7-4292-b874-9ce160fc8988\" (UID: \"95ee854c-ccd7-4292-b874-9ce160fc8988\") " Dec 04 15:29:42 crc kubenswrapper[4946]: I1204 15:29:42.776187 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95ee854c-ccd7-4292-b874-9ce160fc8988-kube-api-access-l8cd4" (OuterVolumeSpecName: "kube-api-access-l8cd4") pod "95ee854c-ccd7-4292-b874-9ce160fc8988" (UID: "95ee854c-ccd7-4292-b874-9ce160fc8988"). InnerVolumeSpecName "kube-api-access-l8cd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:29:42 crc kubenswrapper[4946]: I1204 15:29:42.777044 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95ee854c-ccd7-4292-b874-9ce160fc8988-certs" (OuterVolumeSpecName: "certs") pod "95ee854c-ccd7-4292-b874-9ce160fc8988" (UID: "95ee854c-ccd7-4292-b874-9ce160fc8988"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:29:42 crc kubenswrapper[4946]: I1204 15:29:42.778282 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95ee854c-ccd7-4292-b874-9ce160fc8988-scripts" (OuterVolumeSpecName: "scripts") pod "95ee854c-ccd7-4292-b874-9ce160fc8988" (UID: "95ee854c-ccd7-4292-b874-9ce160fc8988"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:29:42 crc kubenswrapper[4946]: I1204 15:29:42.801280 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95ee854c-ccd7-4292-b874-9ce160fc8988-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "95ee854c-ccd7-4292-b874-9ce160fc8988" (UID: "95ee854c-ccd7-4292-b874-9ce160fc8988"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:29:42 crc kubenswrapper[4946]: I1204 15:29:42.820262 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95ee854c-ccd7-4292-b874-9ce160fc8988-config-data" (OuterVolumeSpecName: "config-data") pod "95ee854c-ccd7-4292-b874-9ce160fc8988" (UID: "95ee854c-ccd7-4292-b874-9ce160fc8988"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:29:42 crc kubenswrapper[4946]: I1204 15:29:42.873264 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95ee854c-ccd7-4292-b874-9ce160fc8988-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:42 crc kubenswrapper[4946]: I1204 15:29:42.873312 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8cd4\" (UniqueName: \"kubernetes.io/projected/95ee854c-ccd7-4292-b874-9ce160fc8988-kube-api-access-l8cd4\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:42 crc kubenswrapper[4946]: I1204 15:29:42.873331 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95ee854c-ccd7-4292-b874-9ce160fc8988-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:42 crc kubenswrapper[4946]: I1204 15:29:42.873342 4946 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/95ee854c-ccd7-4292-b874-9ce160fc8988-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:42 crc kubenswrapper[4946]: I1204 15:29:42.873351 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95ee854c-ccd7-4292-b874-9ce160fc8988-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.238849 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-jkwmj" event={"ID":"95ee854c-ccd7-4292-b874-9ce160fc8988","Type":"ContainerDied","Data":"cce95cddbce0624e9897efda403a741c4a359436939488d322c41df80ae415d8"} Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.239320 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cce95cddbce0624e9897efda403a741c4a359436939488d322c41df80ae415d8" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.238897 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-jkwmj" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.320349 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-storageinit-7qkz8"] Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.331202 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-storageinit-7qkz8"] Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.429788 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-storageinit-mfcpf"] Dec 04 15:29:43 crc kubenswrapper[4946]: E1204 15:29:43.430381 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7735402-ad72-4b1b-8028-f0a78d9bff4d" containerName="registry-server" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.430406 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7735402-ad72-4b1b-8028-f0a78d9bff4d" containerName="registry-server" Dec 04 15:29:43 crc kubenswrapper[4946]: E1204 15:29:43.430436 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6f0046a-c978-4cf7-8199-4617162c1d5f" containerName="dnsmasq-dns" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.430446 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6f0046a-c978-4cf7-8199-4617162c1d5f" containerName="dnsmasq-dns" Dec 04 15:29:43 crc kubenswrapper[4946]: E1204 15:29:43.430473 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7735402-ad72-4b1b-8028-f0a78d9bff4d" containerName="extract-utilities" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.430482 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7735402-ad72-4b1b-8028-f0a78d9bff4d" containerName="extract-utilities" Dec 04 15:29:43 crc kubenswrapper[4946]: E1204 15:29:43.430489 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7735402-ad72-4b1b-8028-f0a78d9bff4d" containerName="extract-content" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.430497 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7735402-ad72-4b1b-8028-f0a78d9bff4d" containerName="extract-content" Dec 04 15:29:43 crc kubenswrapper[4946]: E1204 15:29:43.430514 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6f0046a-c978-4cf7-8199-4617162c1d5f" containerName="init" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.430519 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6f0046a-c978-4cf7-8199-4617162c1d5f" containerName="init" Dec 04 15:29:43 crc kubenswrapper[4946]: E1204 15:29:43.430542 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95ee854c-ccd7-4292-b874-9ce160fc8988" containerName="cloudkitty-db-sync" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.430548 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="95ee854c-ccd7-4292-b874-9ce160fc8988" containerName="cloudkitty-db-sync" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.430743 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6f0046a-c978-4cf7-8199-4617162c1d5f" containerName="dnsmasq-dns" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.430787 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="95ee854c-ccd7-4292-b874-9ce160fc8988" containerName="cloudkitty-db-sync" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.430803 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7735402-ad72-4b1b-8028-f0a78d9bff4d" containerName="registry-server" Dec 04 15:29:43 crc 
kubenswrapper[4946]: I1204 15:29:43.431758 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.441475 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.478413 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4277efcb-7f69-4cb2-9999-09d884c5b706" path="/var/lib/kubelet/pods/4277efcb-7f69-4cb2-9999-09d884c5b706/volumes" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.479616 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-mfcpf"] Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.487857 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfrv4\" (UniqueName: \"kubernetes.io/projected/534da337-2f9e-42db-b58d-48d43ca79b6d-kube-api-access-jfrv4\") pod \"cloudkitty-storageinit-mfcpf\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.487914 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/534da337-2f9e-42db-b58d-48d43ca79b6d-config-data\") pod \"cloudkitty-storageinit-mfcpf\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.488190 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/534da337-2f9e-42db-b58d-48d43ca79b6d-combined-ca-bundle\") pod \"cloudkitty-storageinit-mfcpf\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.488340 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/534da337-2f9e-42db-b58d-48d43ca79b6d-scripts\") pod \"cloudkitty-storageinit-mfcpf\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.488570 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/534da337-2f9e-42db-b58d-48d43ca79b6d-certs\") pod \"cloudkitty-storageinit-mfcpf\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.590761 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/534da337-2f9e-42db-b58d-48d43ca79b6d-combined-ca-bundle\") pod \"cloudkitty-storageinit-mfcpf\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.590858 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/534da337-2f9e-42db-b58d-48d43ca79b6d-scripts\") pod \"cloudkitty-storageinit-mfcpf\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:43 crc kubenswrapper[4946]: 
I1204 15:29:43.590892 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/534da337-2f9e-42db-b58d-48d43ca79b6d-certs\") pod \"cloudkitty-storageinit-mfcpf\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.590957 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfrv4\" (UniqueName: \"kubernetes.io/projected/534da337-2f9e-42db-b58d-48d43ca79b6d-kube-api-access-jfrv4\") pod \"cloudkitty-storageinit-mfcpf\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.590974 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/534da337-2f9e-42db-b58d-48d43ca79b6d-config-data\") pod \"cloudkitty-storageinit-mfcpf\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.595455 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/534da337-2f9e-42db-b58d-48d43ca79b6d-config-data\") pod \"cloudkitty-storageinit-mfcpf\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.595915 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/534da337-2f9e-42db-b58d-48d43ca79b6d-certs\") pod \"cloudkitty-storageinit-mfcpf\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.596413 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/534da337-2f9e-42db-b58d-48d43ca79b6d-combined-ca-bundle\") pod \"cloudkitty-storageinit-mfcpf\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.597477 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/534da337-2f9e-42db-b58d-48d43ca79b6d-scripts\") pod \"cloudkitty-storageinit-mfcpf\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.606864 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfrv4\" (UniqueName: \"kubernetes.io/projected/534da337-2f9e-42db-b58d-48d43ca79b6d-kube-api-access-jfrv4\") pod \"cloudkitty-storageinit-mfcpf\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.758813 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.808224 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-85f64749dc-vglk4" Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.928903 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-sl69k"] Dec 04 15:29:43 crc kubenswrapper[4946]: I1204 15:29:43.929750 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" podUID="f215d2a4-1108-4c98-a338-9bd0e5fe26ad" containerName="dnsmasq-dns" containerID="cri-o://de4abe802e2988555ad5857f16a2c250d968aa8209ab09f1df77d293f0ac18f0" gracePeriod=10 Dec 04 15:29:44 crc kubenswrapper[4946]: I1204 15:29:44.267568 4946 generic.go:334] "Generic (PLEG): container finished" podID="f215d2a4-1108-4c98-a338-9bd0e5fe26ad" containerID="de4abe802e2988555ad5857f16a2c250d968aa8209ab09f1df77d293f0ac18f0" exitCode=0 Dec 04 15:29:44 crc kubenswrapper[4946]: I1204 15:29:44.267643 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" event={"ID":"f215d2a4-1108-4c98-a338-9bd0e5fe26ad","Type":"ContainerDied","Data":"de4abe802e2988555ad5857f16a2c250d968aa8209ab09f1df77d293f0ac18f0"} Dec 04 15:29:44 crc kubenswrapper[4946]: I1204 15:29:44.459219 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-mfcpf"] Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.121284 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.155477 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-ovsdbserver-sb\") pod \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.155698 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-openstack-edpm-ipam\") pod \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.155737 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfhzl\" (UniqueName: \"kubernetes.io/projected/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-kube-api-access-lfhzl\") pod \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.155782 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-ovsdbserver-nb\") pod \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.156929 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-dns-swift-storage-0\") pod \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 
15:29:45.156991 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-dns-svc\") pod \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.157077 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-config\") pod \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\" (UID: \"f215d2a4-1108-4c98-a338-9bd0e5fe26ad\") " Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.184736 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-kube-api-access-lfhzl" (OuterVolumeSpecName: "kube-api-access-lfhzl") pod "f215d2a4-1108-4c98-a338-9bd0e5fe26ad" (UID: "f215d2a4-1108-4c98-a338-9bd0e5fe26ad"). InnerVolumeSpecName "kube-api-access-lfhzl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.261084 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfhzl\" (UniqueName: \"kubernetes.io/projected/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-kube-api-access-lfhzl\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.276781 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f215d2a4-1108-4c98-a338-9bd0e5fe26ad" (UID: "f215d2a4-1108-4c98-a338-9bd0e5fe26ad"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.285296 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "f215d2a4-1108-4c98-a338-9bd0e5fe26ad" (UID: "f215d2a4-1108-4c98-a338-9bd0e5fe26ad"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.287270 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-config" (OuterVolumeSpecName: "config") pod "f215d2a4-1108-4c98-a338-9bd0e5fe26ad" (UID: "f215d2a4-1108-4c98-a338-9bd0e5fe26ad"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.288261 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f215d2a4-1108-4c98-a338-9bd0e5fe26ad" (UID: "f215d2a4-1108-4c98-a338-9bd0e5fe26ad"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.296389 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f215d2a4-1108-4c98-a338-9bd0e5fe26ad" (UID: "f215d2a4-1108-4c98-a338-9bd0e5fe26ad"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.301829 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f215d2a4-1108-4c98-a338-9bd0e5fe26ad" (UID: "f215d2a4-1108-4c98-a338-9bd0e5fe26ad"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.328177 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-mfcpf" event={"ID":"534da337-2f9e-42db-b58d-48d43ca79b6d","Type":"ContainerStarted","Data":"80317987ff1a1e6d9330e502866b5342b36119911829f193cf4011a93906e03d"} Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.328235 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-mfcpf" event={"ID":"534da337-2f9e-42db-b58d-48d43ca79b6d","Type":"ContainerStarted","Data":"84f8074e7b549de66705beef3a36d2a8271ba759742082b134d7175eb45c50b4"} Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.337448 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" event={"ID":"f215d2a4-1108-4c98-a338-9bd0e5fe26ad","Type":"ContainerDied","Data":"dc7e212e1104d5d14f1f4c98af0b7ecf4ec3488ad1f35247cadc677f1a3b4fb0"} Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.337528 4946 scope.go:117] "RemoveContainer" containerID="de4abe802e2988555ad5857f16a2c250d968aa8209ab09f1df77d293f0ac18f0" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.337849 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dbb88bf8c-sl69k" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.363968 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-storageinit-mfcpf" podStartSLOduration=2.363940672 podStartE2EDuration="2.363940672s" podCreationTimestamp="2025-12-04 15:29:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:29:45.354968449 +0000 UTC m=+1636.241012090" watchObservedRunningTime="2025-12-04 15:29:45.363940672 +0000 UTC m=+1636.249984313" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.369750 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.369786 4946 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.369797 4946 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.369807 4946 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.369818 4946 reconciler_common.go:293] "Volume 
detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.369830 4946 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f215d2a4-1108-4c98-a338-9bd0e5fe26ad-config\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.383172 4946 scope.go:117] "RemoveContainer" containerID="ce43636079c283cccba3197818f037a57305d9759c05ee800ce8c2890c411b50" Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.412201 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-sl69k"] Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.433092 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-sl69k"] Dec 04 15:29:45 crc kubenswrapper[4946]: I1204 15:29:45.473078 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f215d2a4-1108-4c98-a338-9bd0e5fe26ad" path="/var/lib/kubelet/pods/f215d2a4-1108-4c98-a338-9bd0e5fe26ad/volumes" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.364242 4946 generic.go:334] "Generic (PLEG): container finished" podID="534da337-2f9e-42db-b58d-48d43ca79b6d" containerID="80317987ff1a1e6d9330e502866b5342b36119911829f193cf4011a93906e03d" exitCode=0 Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.364548 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-mfcpf" event={"ID":"534da337-2f9e-42db-b58d-48d43ca79b6d","Type":"ContainerDied","Data":"80317987ff1a1e6d9330e502866b5342b36119911829f193cf4011a93906e03d"} Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.627556 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b"] Dec 04 15:29:47 crc kubenswrapper[4946]: E1204 15:29:47.628004 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f215d2a4-1108-4c98-a338-9bd0e5fe26ad" containerName="init" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.628017 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f215d2a4-1108-4c98-a338-9bd0e5fe26ad" containerName="init" Dec 04 15:29:47 crc kubenswrapper[4946]: E1204 15:29:47.628033 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f215d2a4-1108-4c98-a338-9bd0e5fe26ad" containerName="dnsmasq-dns" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.628039 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f215d2a4-1108-4c98-a338-9bd0e5fe26ad" containerName="dnsmasq-dns" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.630086 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f215d2a4-1108-4c98-a338-9bd0e5fe26ad" containerName="dnsmasq-dns" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.630889 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.633626 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.633778 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bhtcv" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.634535 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.638178 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.646269 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b"] Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.722588 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcxhj\" (UniqueName: \"kubernetes.io/projected/cc2c7406-87e9-4da5-b99c-845bddf4a05b-kube-api-access-gcxhj\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b\" (UID: \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.722876 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cc2c7406-87e9-4da5-b99c-845bddf4a05b-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b\" (UID: \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.723106 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc2c7406-87e9-4da5-b99c-845bddf4a05b-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b\" (UID: \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.723257 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cc2c7406-87e9-4da5-b99c-845bddf4a05b-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b\" (UID: \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.824771 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cc2c7406-87e9-4da5-b99c-845bddf4a05b-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b\" (UID: \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.824893 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcxhj\" (UniqueName: \"kubernetes.io/projected/cc2c7406-87e9-4da5-b99c-845bddf4a05b-kube-api-access-gcxhj\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b\" (UID: \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.824982 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cc2c7406-87e9-4da5-b99c-845bddf4a05b-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b\" (UID: \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.825061 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc2c7406-87e9-4da5-b99c-845bddf4a05b-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b\" (UID: \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.830968 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc2c7406-87e9-4da5-b99c-845bddf4a05b-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b\" (UID: \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.834349 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cc2c7406-87e9-4da5-b99c-845bddf4a05b-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b\" (UID: \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.835153 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cc2c7406-87e9-4da5-b99c-845bddf4a05b-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b\" (UID: \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.843435 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcxhj\" (UniqueName: \"kubernetes.io/projected/cc2c7406-87e9-4da5-b99c-845bddf4a05b-kube-api-access-gcxhj\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b\" (UID: \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" Dec 04 15:29:47 crc kubenswrapper[4946]: I1204 15:29:47.950966 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" Dec 04 15:29:48 crc kubenswrapper[4946]: I1204 15:29:48.591086 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b"] Dec 04 15:29:48 crc kubenswrapper[4946]: I1204 15:29:48.591541 4946 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 15:29:48 crc kubenswrapper[4946]: I1204 15:29:48.748231 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:48 crc kubenswrapper[4946]: I1204 15:29:48.844816 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/534da337-2f9e-42db-b58d-48d43ca79b6d-certs\") pod \"534da337-2f9e-42db-b58d-48d43ca79b6d\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " Dec 04 15:29:48 crc kubenswrapper[4946]: I1204 15:29:48.844914 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/534da337-2f9e-42db-b58d-48d43ca79b6d-combined-ca-bundle\") pod \"534da337-2f9e-42db-b58d-48d43ca79b6d\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " Dec 04 15:29:48 crc kubenswrapper[4946]: I1204 15:29:48.845154 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfrv4\" (UniqueName: \"kubernetes.io/projected/534da337-2f9e-42db-b58d-48d43ca79b6d-kube-api-access-jfrv4\") pod \"534da337-2f9e-42db-b58d-48d43ca79b6d\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " Dec 04 15:29:48 crc kubenswrapper[4946]: I1204 15:29:48.845239 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/534da337-2f9e-42db-b58d-48d43ca79b6d-config-data\") pod \"534da337-2f9e-42db-b58d-48d43ca79b6d\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " Dec 04 15:29:48 crc kubenswrapper[4946]: I1204 15:29:48.845261 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/534da337-2f9e-42db-b58d-48d43ca79b6d-scripts\") pod \"534da337-2f9e-42db-b58d-48d43ca79b6d\" (UID: \"534da337-2f9e-42db-b58d-48d43ca79b6d\") " Dec 04 15:29:48 crc kubenswrapper[4946]: I1204 15:29:48.851908 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/534da337-2f9e-42db-b58d-48d43ca79b6d-scripts" (OuterVolumeSpecName: "scripts") pod "534da337-2f9e-42db-b58d-48d43ca79b6d" (UID: "534da337-2f9e-42db-b58d-48d43ca79b6d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:29:48 crc kubenswrapper[4946]: I1204 15:29:48.853807 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/534da337-2f9e-42db-b58d-48d43ca79b6d-kube-api-access-jfrv4" (OuterVolumeSpecName: "kube-api-access-jfrv4") pod "534da337-2f9e-42db-b58d-48d43ca79b6d" (UID: "534da337-2f9e-42db-b58d-48d43ca79b6d"). InnerVolumeSpecName "kube-api-access-jfrv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:29:48 crc kubenswrapper[4946]: I1204 15:29:48.855248 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/534da337-2f9e-42db-b58d-48d43ca79b6d-certs" (OuterVolumeSpecName: "certs") pod "534da337-2f9e-42db-b58d-48d43ca79b6d" (UID: "534da337-2f9e-42db-b58d-48d43ca79b6d"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:29:48 crc kubenswrapper[4946]: I1204 15:29:48.884342 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/534da337-2f9e-42db-b58d-48d43ca79b6d-config-data" (OuterVolumeSpecName: "config-data") pod "534da337-2f9e-42db-b58d-48d43ca79b6d" (UID: "534da337-2f9e-42db-b58d-48d43ca79b6d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:29:48 crc kubenswrapper[4946]: I1204 15:29:48.887480 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/534da337-2f9e-42db-b58d-48d43ca79b6d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "534da337-2f9e-42db-b58d-48d43ca79b6d" (UID: "534da337-2f9e-42db-b58d-48d43ca79b6d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:29:48 crc kubenswrapper[4946]: I1204 15:29:48.947258 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/534da337-2f9e-42db-b58d-48d43ca79b6d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:48 crc kubenswrapper[4946]: I1204 15:29:48.947731 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfrv4\" (UniqueName: \"kubernetes.io/projected/534da337-2f9e-42db-b58d-48d43ca79b6d-kube-api-access-jfrv4\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:48 crc kubenswrapper[4946]: I1204 15:29:48.947749 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/534da337-2f9e-42db-b58d-48d43ca79b6d-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:48 crc kubenswrapper[4946]: I1204 15:29:48.947764 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/534da337-2f9e-42db-b58d-48d43ca79b6d-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:48 crc kubenswrapper[4946]: I1204 15:29:48.947778 4946 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/534da337-2f9e-42db-b58d-48d43ca79b6d-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:49 crc kubenswrapper[4946]: I1204 15:29:49.436965 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-mfcpf" event={"ID":"534da337-2f9e-42db-b58d-48d43ca79b6d","Type":"ContainerDied","Data":"84f8074e7b549de66705beef3a36d2a8271ba759742082b134d7175eb45c50b4"} Dec 04 15:29:49 crc kubenswrapper[4946]: I1204 15:29:49.437019 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84f8074e7b549de66705beef3a36d2a8271ba759742082b134d7175eb45c50b4" Dec 04 15:29:49 crc kubenswrapper[4946]: I1204 15:29:49.437166 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-mfcpf" Dec 04 15:29:49 crc kubenswrapper[4946]: I1204 15:29:49.444348 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" event={"ID":"cc2c7406-87e9-4da5-b99c-845bddf4a05b","Type":"ContainerStarted","Data":"2219463c6ccb9d2ae116096fce378d5b66406476079ff2beaf1a064478487270"} Dec 04 15:29:49 crc kubenswrapper[4946]: I1204 15:29:49.530719 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 04 15:29:49 crc kubenswrapper[4946]: I1204 15:29:49.531033 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" containerName="cloudkitty-api-log" containerID="cri-o://20c6a5c355ad98a2f16639306aa8b40b10417c10db02f2eabdd0e1c6b4cb4c19" gracePeriod=30 Dec 04 15:29:49 crc kubenswrapper[4946]: I1204 15:29:49.531825 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" containerName="cloudkitty-api" containerID="cri-o://eef3b33fa7fae5fe286a436c2342eeaa25e665d5c4f013a1896cca4e902fff38" gracePeriod=30 Dec 04 15:29:49 crc kubenswrapper[4946]: I1204 15:29:49.552455 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 04 15:29:49 crc kubenswrapper[4946]: I1204 15:29:49.552799 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="b21e37e6-3f4e-497d-9b6f-1f11f9d284a8" containerName="cloudkitty-proc" containerID="cri-o://dd5e41737b51f633aaca90f9ecaa0b928cce92138cf784d39258af4f082fc0f5" gracePeriod=30 Dec 04 15:29:50 crc kubenswrapper[4946]: I1204 15:29:50.482680 4946 generic.go:334] "Generic (PLEG): container finished" podID="b21e37e6-3f4e-497d-9b6f-1f11f9d284a8" containerID="dd5e41737b51f633aaca90f9ecaa0b928cce92138cf784d39258af4f082fc0f5" exitCode=0 Dec 04 15:29:50 crc kubenswrapper[4946]: I1204 15:29:50.483388 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8","Type":"ContainerDied","Data":"dd5e41737b51f633aaca90f9ecaa0b928cce92138cf784d39258af4f082fc0f5"} Dec 04 15:29:50 crc kubenswrapper[4946]: I1204 15:29:50.498708 4946 generic.go:334] "Generic (PLEG): container finished" podID="b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" containerID="20c6a5c355ad98a2f16639306aa8b40b10417c10db02f2eabdd0e1c6b4cb4c19" exitCode=143 Dec 04 15:29:50 crc kubenswrapper[4946]: I1204 15:29:50.498808 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e","Type":"ContainerDied","Data":"20c6a5c355ad98a2f16639306aa8b40b10417c10db02f2eabdd0e1c6b4cb4c19"} Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.050354 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.145397 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gbpz5\" (UniqueName: \"kubernetes.io/projected/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-kube-api-access-gbpz5\") pod \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.145464 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-config-data-custom\") pod \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.145533 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-config-data\") pod \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.145584 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-scripts\") pod \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.145668 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-combined-ca-bundle\") pod \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.145727 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-certs\") pod \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\" (UID: \"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8\") " Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.153367 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b21e37e6-3f4e-497d-9b6f-1f11f9d284a8" (UID: "b21e37e6-3f4e-497d-9b6f-1f11f9d284a8"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.153684 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-certs" (OuterVolumeSpecName: "certs") pod "b21e37e6-3f4e-497d-9b6f-1f11f9d284a8" (UID: "b21e37e6-3f4e-497d-9b6f-1f11f9d284a8"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.153997 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-kube-api-access-gbpz5" (OuterVolumeSpecName: "kube-api-access-gbpz5") pod "b21e37e6-3f4e-497d-9b6f-1f11f9d284a8" (UID: "b21e37e6-3f4e-497d-9b6f-1f11f9d284a8"). InnerVolumeSpecName "kube-api-access-gbpz5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.170927 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-api-0" podUID="b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" containerName="cloudkitty-api" probeResult="failure" output="Get \"https://10.217.0.188:8889/healthcheck\": read tcp 10.217.0.2:51206->10.217.0.188:8889: read: connection reset by peer" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.182305 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-scripts" (OuterVolumeSpecName: "scripts") pod "b21e37e6-3f4e-497d-9b6f-1f11f9d284a8" (UID: "b21e37e6-3f4e-497d-9b6f-1f11f9d284a8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.193243 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-config-data" (OuterVolumeSpecName: "config-data") pod "b21e37e6-3f4e-497d-9b6f-1f11f9d284a8" (UID: "b21e37e6-3f4e-497d-9b6f-1f11f9d284a8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.197148 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b21e37e6-3f4e-497d-9b6f-1f11f9d284a8" (UID: "b21e37e6-3f4e-497d-9b6f-1f11f9d284a8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.248546 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.248591 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.248603 4946 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.248611 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbpz5\" (UniqueName: \"kubernetes.io/projected/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-kube-api-access-gbpz5\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.248620 4946 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.248631 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.519806 4946 generic.go:334] "Generic (PLEG): container finished" podID="b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" 
containerID="eef3b33fa7fae5fe286a436c2342eeaa25e665d5c4f013a1896cca4e902fff38" exitCode=0 Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.519914 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e","Type":"ContainerDied","Data":"eef3b33fa7fae5fe286a436c2342eeaa25e665d5c4f013a1896cca4e902fff38"} Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.528499 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"b21e37e6-3f4e-497d-9b6f-1f11f9d284a8","Type":"ContainerDied","Data":"946aab3ec23fa8abc17be2005dc05ac84deb8aa761aff5fe6ca065b532ea9b69"} Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.528565 4946 scope.go:117] "RemoveContainer" containerID="dd5e41737b51f633aaca90f9ecaa0b928cce92138cf784d39258af4f082fc0f5" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.528784 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.595193 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.608580 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.619437 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 04 15:29:51 crc kubenswrapper[4946]: E1204 15:29:51.620251 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21e37e6-3f4e-497d-9b6f-1f11f9d284a8" containerName="cloudkitty-proc" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.620273 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21e37e6-3f4e-497d-9b6f-1f11f9d284a8" containerName="cloudkitty-proc" Dec 04 15:29:51 crc kubenswrapper[4946]: E1204 15:29:51.620312 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="534da337-2f9e-42db-b58d-48d43ca79b6d" containerName="cloudkitty-storageinit" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.620323 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="534da337-2f9e-42db-b58d-48d43ca79b6d" containerName="cloudkitty-storageinit" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.620610 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="534da337-2f9e-42db-b58d-48d43ca79b6d" containerName="cloudkitty-storageinit" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.620638 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21e37e6-3f4e-497d-9b6f-1f11f9d284a8" containerName="cloudkitty-proc" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.621979 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.628739 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.635085 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.678026 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25621d99-0fe9-42fe-a800-08160c4740aa-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"25621d99-0fe9-42fe-a800-08160c4740aa\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.678181 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25621d99-0fe9-42fe-a800-08160c4740aa-config-data\") pod \"cloudkitty-proc-0\" (UID: \"25621d99-0fe9-42fe-a800-08160c4740aa\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.678279 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bbht\" (UniqueName: \"kubernetes.io/projected/25621d99-0fe9-42fe-a800-08160c4740aa-kube-api-access-7bbht\") pod \"cloudkitty-proc-0\" (UID: \"25621d99-0fe9-42fe-a800-08160c4740aa\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.678338 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25621d99-0fe9-42fe-a800-08160c4740aa-scripts\") pod \"cloudkitty-proc-0\" (UID: \"25621d99-0fe9-42fe-a800-08160c4740aa\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.678365 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/25621d99-0fe9-42fe-a800-08160c4740aa-certs\") pod \"cloudkitty-proc-0\" (UID: \"25621d99-0fe9-42fe-a800-08160c4740aa\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.678427 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/25621d99-0fe9-42fe-a800-08160c4740aa-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"25621d99-0fe9-42fe-a800-08160c4740aa\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.733575 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.780656 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25621d99-0fe9-42fe-a800-08160c4740aa-config-data\") pod \"cloudkitty-proc-0\" (UID: \"25621d99-0fe9-42fe-a800-08160c4740aa\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.780751 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bbht\" (UniqueName: \"kubernetes.io/projected/25621d99-0fe9-42fe-a800-08160c4740aa-kube-api-access-7bbht\") pod \"cloudkitty-proc-0\" (UID: \"25621d99-0fe9-42fe-a800-08160c4740aa\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.780792 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25621d99-0fe9-42fe-a800-08160c4740aa-scripts\") pod \"cloudkitty-proc-0\" (UID: \"25621d99-0fe9-42fe-a800-08160c4740aa\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.780807 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/25621d99-0fe9-42fe-a800-08160c4740aa-certs\") pod \"cloudkitty-proc-0\" (UID: \"25621d99-0fe9-42fe-a800-08160c4740aa\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.780883 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/25621d99-0fe9-42fe-a800-08160c4740aa-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"25621d99-0fe9-42fe-a800-08160c4740aa\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.780945 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25621d99-0fe9-42fe-a800-08160c4740aa-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"25621d99-0fe9-42fe-a800-08160c4740aa\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.790463 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/25621d99-0fe9-42fe-a800-08160c4740aa-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"25621d99-0fe9-42fe-a800-08160c4740aa\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.796544 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25621d99-0fe9-42fe-a800-08160c4740aa-config-data\") pod \"cloudkitty-proc-0\" (UID: \"25621d99-0fe9-42fe-a800-08160c4740aa\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.799973 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25621d99-0fe9-42fe-a800-08160c4740aa-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"25621d99-0fe9-42fe-a800-08160c4740aa\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.800811 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/25621d99-0fe9-42fe-a800-08160c4740aa-certs\") pod 
\"cloudkitty-proc-0\" (UID: \"25621d99-0fe9-42fe-a800-08160c4740aa\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.810102 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25621d99-0fe9-42fe-a800-08160c4740aa-scripts\") pod \"cloudkitty-proc-0\" (UID: \"25621d99-0fe9-42fe-a800-08160c4740aa\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.819152 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bbht\" (UniqueName: \"kubernetes.io/projected/25621d99-0fe9-42fe-a800-08160c4740aa-kube-api-access-7bbht\") pod \"cloudkitty-proc-0\" (UID: \"25621d99-0fe9-42fe-a800-08160c4740aa\") " pod="openstack/cloudkitty-proc-0" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.886463 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-logs\") pod \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.886598 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-config-data-custom\") pod \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.886647 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-public-tls-certs\") pod \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.886777 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-config-data\") pod \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.886987 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-logs" (OuterVolumeSpecName: "logs") pod "b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" (UID: "b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.886866 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njv2d\" (UniqueName: \"kubernetes.io/projected/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-kube-api-access-njv2d\") pod \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.887450 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-scripts\") pod \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.887594 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-certs\") pod \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.887653 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-internal-tls-certs\") pod \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.887787 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-combined-ca-bundle\") pod \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\" (UID: \"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e\") " Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.889146 4946 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-logs\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.893600 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" (UID: "b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.896087 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-certs" (OuterVolumeSpecName: "certs") pod "b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" (UID: "b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.897731 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-scripts" (OuterVolumeSpecName: "scripts") pod "b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" (UID: "b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.900447 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-kube-api-access-njv2d" (OuterVolumeSpecName: "kube-api-access-njv2d") pod "b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" (UID: "b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e"). InnerVolumeSpecName "kube-api-access-njv2d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.953064 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-config-data" (OuterVolumeSpecName: "config-data") pod "b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" (UID: "b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.992198 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" (UID: "b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.998373 4946 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.998422 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.998441 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njv2d\" (UniqueName: \"kubernetes.io/projected/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-kube-api-access-njv2d\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.998462 4946 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.998476 4946 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:51 crc kubenswrapper[4946]: I1204 15:29:51.998492 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.010404 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" (UID: "b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.026925 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" (UID: "b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.035179 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.102302 4946 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.102760 4946 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.479154 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.479248 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.479336 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.482411 4946 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f"} pod="openshift-machine-config-operator/machine-config-daemon-qhv79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.482978 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" containerID="cri-o://7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" gracePeriod=600 Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.592371 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e","Type":"ContainerDied","Data":"ae34b3ada987931401267c423cd54fb8fdae51a8c23a4b9495543cfc56796f5e"} Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.592898 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.592946 4946 scope.go:117] "RemoveContainer" containerID="eef3b33fa7fae5fe286a436c2342eeaa25e665d5c4f013a1896cca4e902fff38" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.598303 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.662988 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.679157 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.717370 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"] Dec 04 15:29:52 crc kubenswrapper[4946]: E1204 15:29:52.719781 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" containerName="cloudkitty-api" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.719806 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" containerName="cloudkitty-api" Dec 04 15:29:52 crc kubenswrapper[4946]: E1204 15:29:52.719831 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" containerName="cloudkitty-api-log" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.719838 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" containerName="cloudkitty-api-log" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.720110 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" containerName="cloudkitty-api" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.720143 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" containerName="cloudkitty-api-log" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.723988 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.731020 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.731531 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-internal-svc" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.731852 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-public-svc" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.741644 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.922683 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nc6p7\" (UniqueName: \"kubernetes.io/projected/602d77a3-3d2b-488d-ac47-74d9fd037d6c-kube-api-access-nc6p7\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.923610 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/602d77a3-3d2b-488d-ac47-74d9fd037d6c-logs\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.923755 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/602d77a3-3d2b-488d-ac47-74d9fd037d6c-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.923830 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/602d77a3-3d2b-488d-ac47-74d9fd037d6c-scripts\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.923939 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/602d77a3-3d2b-488d-ac47-74d9fd037d6c-certs\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.923970 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/602d77a3-3d2b-488d-ac47-74d9fd037d6c-config-data\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.924153 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/602d77a3-3d2b-488d-ac47-74d9fd037d6c-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.924337 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/602d77a3-3d2b-488d-ac47-74d9fd037d6c-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:52 crc kubenswrapper[4946]: I1204 15:29:52.924471 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/602d77a3-3d2b-488d-ac47-74d9fd037d6c-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.026555 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/602d77a3-3d2b-488d-ac47-74d9fd037d6c-logs\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.026634 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/602d77a3-3d2b-488d-ac47-74d9fd037d6c-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.026684 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/602d77a3-3d2b-488d-ac47-74d9fd037d6c-scripts\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.026726 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/602d77a3-3d2b-488d-ac47-74d9fd037d6c-certs\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.026751 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/602d77a3-3d2b-488d-ac47-74d9fd037d6c-config-data\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.026802 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/602d77a3-3d2b-488d-ac47-74d9fd037d6c-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.026864 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/602d77a3-3d2b-488d-ac47-74d9fd037d6c-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.026910 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/602d77a3-3d2b-488d-ac47-74d9fd037d6c-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.026989 4946 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nc6p7\" (UniqueName: \"kubernetes.io/projected/602d77a3-3d2b-488d-ac47-74d9fd037d6c-kube-api-access-nc6p7\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.027093 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/602d77a3-3d2b-488d-ac47-74d9fd037d6c-logs\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.034600 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/602d77a3-3d2b-488d-ac47-74d9fd037d6c-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.034803 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/602d77a3-3d2b-488d-ac47-74d9fd037d6c-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.035360 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/602d77a3-3d2b-488d-ac47-74d9fd037d6c-scripts\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.035792 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/602d77a3-3d2b-488d-ac47-74d9fd037d6c-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.037183 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/602d77a3-3d2b-488d-ac47-74d9fd037d6c-config-data\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.040532 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/602d77a3-3d2b-488d-ac47-74d9fd037d6c-certs\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.040868 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/602d77a3-3d2b-488d-ac47-74d9fd037d6c-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.046728 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nc6p7\" (UniqueName: \"kubernetes.io/projected/602d77a3-3d2b-488d-ac47-74d9fd037d6c-kube-api-access-nc6p7\") pod \"cloudkitty-api-0\" (UID: \"602d77a3-3d2b-488d-ac47-74d9fd037d6c\") " pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.056021 
4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.468170 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b21e37e6-3f4e-497d-9b6f-1f11f9d284a8" path="/var/lib/kubelet/pods/b21e37e6-3f4e-497d-9b6f-1f11f9d284a8/volumes" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.469487 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e" path="/var/lib/kubelet/pods/b7d0e1fc-e6f0-4714-9502-f9d39d8ca42e/volumes" Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.608558 4946 generic.go:334] "Generic (PLEG): container finished" podID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" exitCode=0 Dec 04 15:29:53 crc kubenswrapper[4946]: I1204 15:29:53.608622 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerDied","Data":"7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f"} Dec 04 15:29:54 crc kubenswrapper[4946]: E1204 15:29:54.353244 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:29:54 crc kubenswrapper[4946]: W1204 15:29:54.354528 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25621d99_0fe9_42fe_a800_08160c4740aa.slice/crio-3f70698f1e8e7935ac1ae109d677b9caae0f63f4477bd797954e8572a401aa1f WatchSource:0}: Error finding container 3f70698f1e8e7935ac1ae109d677b9caae0f63f4477bd797954e8572a401aa1f: Status 404 returned error can't find the container with id 3f70698f1e8e7935ac1ae109d677b9caae0f63f4477bd797954e8572a401aa1f Dec 04 15:29:54 crc kubenswrapper[4946]: I1204 15:29:54.374293 4946 scope.go:117] "RemoveContainer" containerID="20c6a5c355ad98a2f16639306aa8b40b10417c10db02f2eabdd0e1c6b4cb4c19" Dec 04 15:29:54 crc kubenswrapper[4946]: I1204 15:29:54.465165 4946 scope.go:117] "RemoveContainer" containerID="a4672e78e5cc3d2ad9bab9f7368c2628b00d850ecbb6c4792dcaf037af3ed10c" Dec 04 15:29:54 crc kubenswrapper[4946]: I1204 15:29:54.625339 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:29:54 crc kubenswrapper[4946]: E1204 15:29:54.626992 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:29:54 crc kubenswrapper[4946]: I1204 15:29:54.633314 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" 
event={"ID":"25621d99-0fe9-42fe-a800-08160c4740aa","Type":"ContainerStarted","Data":"3f70698f1e8e7935ac1ae109d677b9caae0f63f4477bd797954e8572a401aa1f"} Dec 04 15:29:54 crc kubenswrapper[4946]: I1204 15:29:54.858976 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 04 15:29:54 crc kubenswrapper[4946]: W1204 15:29:54.868857 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod602d77a3_3d2b_488d_ac47_74d9fd037d6c.slice/crio-047e7439173cfca48979cdc42e16fdf990fea7407ee4dceb350d0be0d47a7785 WatchSource:0}: Error finding container 047e7439173cfca48979cdc42e16fdf990fea7407ee4dceb350d0be0d47a7785: Status 404 returned error can't find the container with id 047e7439173cfca48979cdc42e16fdf990fea7407ee4dceb350d0be0d47a7785 Dec 04 15:29:55 crc kubenswrapper[4946]: I1204 15:29:55.646080 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"602d77a3-3d2b-488d-ac47-74d9fd037d6c","Type":"ContainerStarted","Data":"05683793de5bbe41063a31caaa4b3618775b0ff626ec6711162cb3c6407bd73b"} Dec 04 15:29:55 crc kubenswrapper[4946]: I1204 15:29:55.646528 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"602d77a3-3d2b-488d-ac47-74d9fd037d6c","Type":"ContainerStarted","Data":"047e7439173cfca48979cdc42e16fdf990fea7407ee4dceb350d0be0d47a7785"} Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.223180 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv"] Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.226680 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv" Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.231583 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.231985 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.256426 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv"] Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.427583 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98qbj\" (UniqueName: \"kubernetes.io/projected/a29898d0-c1ba-435d-b43c-337236b65e84-kube-api-access-98qbj\") pod \"collect-profiles-29414370-x5tjv\" (UID: \"a29898d0-c1ba-435d-b43c-337236b65e84\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv" Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.429390 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a29898d0-c1ba-435d-b43c-337236b65e84-config-volume\") pod \"collect-profiles-29414370-x5tjv\" (UID: \"a29898d0-c1ba-435d-b43c-337236b65e84\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv" Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.429829 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/a29898d0-c1ba-435d-b43c-337236b65e84-secret-volume\") pod \"collect-profiles-29414370-x5tjv\" (UID: \"a29898d0-c1ba-435d-b43c-337236b65e84\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv" Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.531986 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98qbj\" (UniqueName: \"kubernetes.io/projected/a29898d0-c1ba-435d-b43c-337236b65e84-kube-api-access-98qbj\") pod \"collect-profiles-29414370-x5tjv\" (UID: \"a29898d0-c1ba-435d-b43c-337236b65e84\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv" Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.532127 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a29898d0-c1ba-435d-b43c-337236b65e84-config-volume\") pod \"collect-profiles-29414370-x5tjv\" (UID: \"a29898d0-c1ba-435d-b43c-337236b65e84\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv" Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.532222 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a29898d0-c1ba-435d-b43c-337236b65e84-secret-volume\") pod \"collect-profiles-29414370-x5tjv\" (UID: \"a29898d0-c1ba-435d-b43c-337236b65e84\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv" Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.535671 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a29898d0-c1ba-435d-b43c-337236b65e84-config-volume\") pod \"collect-profiles-29414370-x5tjv\" (UID: \"a29898d0-c1ba-435d-b43c-337236b65e84\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv" Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.579259 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a29898d0-c1ba-435d-b43c-337236b65e84-secret-volume\") pod \"collect-profiles-29414370-x5tjv\" (UID: \"a29898d0-c1ba-435d-b43c-337236b65e84\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv" Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.579320 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98qbj\" (UniqueName: \"kubernetes.io/projected/a29898d0-c1ba-435d-b43c-337236b65e84-kube-api-access-98qbj\") pod \"collect-profiles-29414370-x5tjv\" (UID: \"a29898d0-c1ba-435d-b43c-337236b65e84\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv" Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.733607 4946 generic.go:334] "Generic (PLEG): container finished" podID="f75f35c6-b58d-471d-9b5e-2d402f3ce92f" containerID="40a5c389af2cb85345cab40bb56878518267cad4f47816c51a510216ced5da73" exitCode=0 Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.733662 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f75f35c6-b58d-471d-9b5e-2d402f3ce92f","Type":"ContainerDied","Data":"40a5c389af2cb85345cab40bb56878518267cad4f47816c51a510216ced5da73"} Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.739726 4946 generic.go:334] "Generic (PLEG): container finished" podID="6458626f-136f-475a-b7ad-cf32977e39eb" 
containerID="1e70b385ff0a918772b2b50fd6529ddc346ee88fc069d6d6db09d8bb2710a4f1" exitCode=0 Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.739788 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6458626f-136f-475a-b7ad-cf32977e39eb","Type":"ContainerDied","Data":"1e70b385ff0a918772b2b50fd6529ddc346ee88fc069d6d6db09d8bb2710a4f1"} Dec 04 15:30:00 crc kubenswrapper[4946]: I1204 15:30:00.856284 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv" Dec 04 15:30:04 crc kubenswrapper[4946]: I1204 15:30:04.799779 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6458626f-136f-475a-b7ad-cf32977e39eb","Type":"ContainerStarted","Data":"cea04dfb442264cc24a223c66a99e3849e75bd0d5640d05fc931427e28d38d62"} Dec 04 15:30:04 crc kubenswrapper[4946]: I1204 15:30:04.800903 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:30:04 crc kubenswrapper[4946]: I1204 15:30:04.805478 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" event={"ID":"cc2c7406-87e9-4da5-b99c-845bddf4a05b","Type":"ContainerStarted","Data":"dac708a8094fa81c57f6bab2d81e8ab7bc5f1b2c6ff658859a2a12e040cfeb3e"} Dec 04 15:30:04 crc kubenswrapper[4946]: I1204 15:30:04.816256 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f75f35c6-b58d-471d-9b5e-2d402f3ce92f","Type":"ContainerStarted","Data":"f981dd39155f87a27a17aab6acad096f2169b27f38ebd5cadbe55c6c1823db7b"} Dec 04 15:30:04 crc kubenswrapper[4946]: I1204 15:30:04.817431 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 04 15:30:04 crc kubenswrapper[4946]: I1204 15:30:04.835106 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=40.835079553 podStartE2EDuration="40.835079553s" podCreationTimestamp="2025-12-04 15:29:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:30:04.82392172 +0000 UTC m=+1655.709965361" watchObservedRunningTime="2025-12-04 15:30:04.835079553 +0000 UTC m=+1655.721123194" Dec 04 15:30:04 crc kubenswrapper[4946]: I1204 15:30:04.870521 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" podStartSLOduration=2.163553462 podStartE2EDuration="17.870490653s" podCreationTimestamp="2025-12-04 15:29:47 +0000 UTC" firstStartedPulling="2025-12-04 15:29:48.591299747 +0000 UTC m=+1639.477343388" lastFinishedPulling="2025-12-04 15:30:04.298236938 +0000 UTC m=+1655.184280579" observedRunningTime="2025-12-04 15:30:04.858822316 +0000 UTC m=+1655.744865957" watchObservedRunningTime="2025-12-04 15:30:04.870490653 +0000 UTC m=+1655.756534294" Dec 04 15:30:04 crc kubenswrapper[4946]: I1204 15:30:04.903027 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=46.902998094 podStartE2EDuration="46.902998094s" podCreationTimestamp="2025-12-04 15:29:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 
15:30:04.888265665 +0000 UTC m=+1655.774309296" watchObservedRunningTime="2025-12-04 15:30:04.902998094 +0000 UTC m=+1655.789041735" Dec 04 15:30:05 crc kubenswrapper[4946]: W1204 15:30:05.028737 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda29898d0_c1ba_435d_b43c_337236b65e84.slice/crio-31213434a21cdc5c2cc4d21d65660815ea95c94851f721fc0f79bcfdae0e6b6f WatchSource:0}: Error finding container 31213434a21cdc5c2cc4d21d65660815ea95c94851f721fc0f79bcfdae0e6b6f: Status 404 returned error can't find the container with id 31213434a21cdc5c2cc4d21d65660815ea95c94851f721fc0f79bcfdae0e6b6f Dec 04 15:30:05 crc kubenswrapper[4946]: I1204 15:30:05.037793 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv"] Dec 04 15:30:05 crc kubenswrapper[4946]: I1204 15:30:05.830693 4946 generic.go:334] "Generic (PLEG): container finished" podID="a29898d0-c1ba-435d-b43c-337236b65e84" containerID="ea80a56a38d5c371501103c9c4cad6015f0b91f338918f56fee498b0a208cad2" exitCode=0 Dec 04 15:30:05 crc kubenswrapper[4946]: I1204 15:30:05.830766 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv" event={"ID":"a29898d0-c1ba-435d-b43c-337236b65e84","Type":"ContainerDied","Data":"ea80a56a38d5c371501103c9c4cad6015f0b91f338918f56fee498b0a208cad2"} Dec 04 15:30:05 crc kubenswrapper[4946]: I1204 15:30:05.831177 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv" event={"ID":"a29898d0-c1ba-435d-b43c-337236b65e84","Type":"ContainerStarted","Data":"31213434a21cdc5c2cc4d21d65660815ea95c94851f721fc0f79bcfdae0e6b6f"} Dec 04 15:30:05 crc kubenswrapper[4946]: I1204 15:30:05.833706 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"25621d99-0fe9-42fe-a800-08160c4740aa","Type":"ContainerStarted","Data":"b29c3269cb1dee2dd23d962fcedfdb3b4d968ea2eefabe96e46d69c9dbea7d2d"} Dec 04 15:30:05 crc kubenswrapper[4946]: I1204 15:30:05.838947 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"602d77a3-3d2b-488d-ac47-74d9fd037d6c","Type":"ContainerStarted","Data":"1967a17d7cc39b1dbe0a8b1290f69152344d89454d8a67f4f3f0d6d72b48bc72"} Dec 04 15:30:05 crc kubenswrapper[4946]: I1204 15:30:05.904510 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=13.904479998 podStartE2EDuration="13.904479998s" podCreationTimestamp="2025-12-04 15:29:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 15:30:05.890660863 +0000 UTC m=+1656.776704504" watchObservedRunningTime="2025-12-04 15:30:05.904479998 +0000 UTC m=+1656.790523639" Dec 04 15:30:05 crc kubenswrapper[4946]: I1204 15:30:05.928648 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=5.058803625 podStartE2EDuration="14.928596212s" podCreationTimestamp="2025-12-04 15:29:51 +0000 UTC" firstStartedPulling="2025-12-04 15:29:54.374655913 +0000 UTC m=+1645.260699554" lastFinishedPulling="2025-12-04 15:30:04.2444485 +0000 UTC m=+1655.130492141" observedRunningTime="2025-12-04 15:30:05.914255213 +0000 UTC m=+1656.800298894" watchObservedRunningTime="2025-12-04 
15:30:05.928596212 +0000 UTC m=+1656.814640063" Dec 04 15:30:06 crc kubenswrapper[4946]: I1204 15:30:06.453589 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:30:06 crc kubenswrapper[4946]: E1204 15:30:06.453920 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:30:06 crc kubenswrapper[4946]: I1204 15:30:06.903492 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0" Dec 04 15:30:07 crc kubenswrapper[4946]: I1204 15:30:07.422307 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv" Dec 04 15:30:07 crc kubenswrapper[4946]: I1204 15:30:07.441417 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a29898d0-c1ba-435d-b43c-337236b65e84-secret-volume\") pod \"a29898d0-c1ba-435d-b43c-337236b65e84\" (UID: \"a29898d0-c1ba-435d-b43c-337236b65e84\") " Dec 04 15:30:07 crc kubenswrapper[4946]: I1204 15:30:07.441620 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98qbj\" (UniqueName: \"kubernetes.io/projected/a29898d0-c1ba-435d-b43c-337236b65e84-kube-api-access-98qbj\") pod \"a29898d0-c1ba-435d-b43c-337236b65e84\" (UID: \"a29898d0-c1ba-435d-b43c-337236b65e84\") " Dec 04 15:30:07 crc kubenswrapper[4946]: I1204 15:30:07.441672 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a29898d0-c1ba-435d-b43c-337236b65e84-config-volume\") pod \"a29898d0-c1ba-435d-b43c-337236b65e84\" (UID: \"a29898d0-c1ba-435d-b43c-337236b65e84\") " Dec 04 15:30:07 crc kubenswrapper[4946]: I1204 15:30:07.442749 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a29898d0-c1ba-435d-b43c-337236b65e84-config-volume" (OuterVolumeSpecName: "config-volume") pod "a29898d0-c1ba-435d-b43c-337236b65e84" (UID: "a29898d0-c1ba-435d-b43c-337236b65e84"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:30:07 crc kubenswrapper[4946]: I1204 15:30:07.472720 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a29898d0-c1ba-435d-b43c-337236b65e84-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a29898d0-c1ba-435d-b43c-337236b65e84" (UID: "a29898d0-c1ba-435d-b43c-337236b65e84"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:30:07 crc kubenswrapper[4946]: I1204 15:30:07.484656 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a29898d0-c1ba-435d-b43c-337236b65e84-kube-api-access-98qbj" (OuterVolumeSpecName: "kube-api-access-98qbj") pod "a29898d0-c1ba-435d-b43c-337236b65e84" (UID: "a29898d0-c1ba-435d-b43c-337236b65e84"). InnerVolumeSpecName "kube-api-access-98qbj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:30:07 crc kubenswrapper[4946]: I1204 15:30:07.545293 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98qbj\" (UniqueName: \"kubernetes.io/projected/a29898d0-c1ba-435d-b43c-337236b65e84-kube-api-access-98qbj\") on node \"crc\" DevicePath \"\"" Dec 04 15:30:07 crc kubenswrapper[4946]: I1204 15:30:07.545339 4946 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a29898d0-c1ba-435d-b43c-337236b65e84-config-volume\") on node \"crc\" DevicePath \"\"" Dec 04 15:30:07 crc kubenswrapper[4946]: I1204 15:30:07.545348 4946 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a29898d0-c1ba-435d-b43c-337236b65e84-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 04 15:30:07 crc kubenswrapper[4946]: I1204 15:30:07.915824 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv" Dec 04 15:30:07 crc kubenswrapper[4946]: I1204 15:30:07.915816 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv" event={"ID":"a29898d0-c1ba-435d-b43c-337236b65e84","Type":"ContainerDied","Data":"31213434a21cdc5c2cc4d21d65660815ea95c94851f721fc0f79bcfdae0e6b6f"} Dec 04 15:30:07 crc kubenswrapper[4946]: I1204 15:30:07.915883 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="31213434a21cdc5c2cc4d21d65660815ea95c94851f721fc0f79bcfdae0e6b6f" Dec 04 15:30:14 crc kubenswrapper[4946]: I1204 15:30:14.675766 4946 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="6458626f-136f-475a-b7ad-cf32977e39eb" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.238:5671: connect: connection refused" Dec 04 15:30:18 crc kubenswrapper[4946]: I1204 15:30:18.051839 4946 generic.go:334] "Generic (PLEG): container finished" podID="cc2c7406-87e9-4da5-b99c-845bddf4a05b" containerID="dac708a8094fa81c57f6bab2d81e8ab7bc5f1b2c6ff658859a2a12e040cfeb3e" exitCode=0 Dec 04 15:30:18 crc kubenswrapper[4946]: I1204 15:30:18.051928 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" event={"ID":"cc2c7406-87e9-4da5-b99c-845bddf4a05b","Type":"ContainerDied","Data":"dac708a8094fa81c57f6bab2d81e8ab7bc5f1b2c6ff658859a2a12e040cfeb3e"} Dec 04 15:30:18 crc kubenswrapper[4946]: I1204 15:30:18.943467 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 04 15:30:19 crc kubenswrapper[4946]: I1204 15:30:19.461576 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:30:19 crc kubenswrapper[4946]: E1204 15:30:19.462010 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:30:19 crc kubenswrapper[4946]: I1204 15:30:19.735995 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" Dec 04 15:30:19 crc kubenswrapper[4946]: I1204 15:30:19.839607 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcxhj\" (UniqueName: \"kubernetes.io/projected/cc2c7406-87e9-4da5-b99c-845bddf4a05b-kube-api-access-gcxhj\") pod \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\" (UID: \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\") " Dec 04 15:30:19 crc kubenswrapper[4946]: I1204 15:30:19.839752 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cc2c7406-87e9-4da5-b99c-845bddf4a05b-ssh-key\") pod \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\" (UID: \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\") " Dec 04 15:30:19 crc kubenswrapper[4946]: I1204 15:30:19.839814 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc2c7406-87e9-4da5-b99c-845bddf4a05b-repo-setup-combined-ca-bundle\") pod \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\" (UID: \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\") " Dec 04 15:30:19 crc kubenswrapper[4946]: I1204 15:30:19.839885 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cc2c7406-87e9-4da5-b99c-845bddf4a05b-inventory\") pod \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\" (UID: \"cc2c7406-87e9-4da5-b99c-845bddf4a05b\") " Dec 04 15:30:19 crc kubenswrapper[4946]: I1204 15:30:19.855296 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc2c7406-87e9-4da5-b99c-845bddf4a05b-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "cc2c7406-87e9-4da5-b99c-845bddf4a05b" (UID: "cc2c7406-87e9-4da5-b99c-845bddf4a05b"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:30:19 crc kubenswrapper[4946]: I1204 15:30:19.855320 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc2c7406-87e9-4da5-b99c-845bddf4a05b-kube-api-access-gcxhj" (OuterVolumeSpecName: "kube-api-access-gcxhj") pod "cc2c7406-87e9-4da5-b99c-845bddf4a05b" (UID: "cc2c7406-87e9-4da5-b99c-845bddf4a05b"). InnerVolumeSpecName "kube-api-access-gcxhj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:30:19 crc kubenswrapper[4946]: I1204 15:30:19.882619 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc2c7406-87e9-4da5-b99c-845bddf4a05b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "cc2c7406-87e9-4da5-b99c-845bddf4a05b" (UID: "cc2c7406-87e9-4da5-b99c-845bddf4a05b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:30:19 crc kubenswrapper[4946]: I1204 15:30:19.883879 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc2c7406-87e9-4da5-b99c-845bddf4a05b-inventory" (OuterVolumeSpecName: "inventory") pod "cc2c7406-87e9-4da5-b99c-845bddf4a05b" (UID: "cc2c7406-87e9-4da5-b99c-845bddf4a05b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:30:19 crc kubenswrapper[4946]: I1204 15:30:19.942756 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcxhj\" (UniqueName: \"kubernetes.io/projected/cc2c7406-87e9-4da5-b99c-845bddf4a05b-kube-api-access-gcxhj\") on node \"crc\" DevicePath \"\"" Dec 04 15:30:19 crc kubenswrapper[4946]: I1204 15:30:19.942980 4946 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cc2c7406-87e9-4da5-b99c-845bddf4a05b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 15:30:19 crc kubenswrapper[4946]: I1204 15:30:19.943079 4946 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc2c7406-87e9-4da5-b99c-845bddf4a05b-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:30:19 crc kubenswrapper[4946]: I1204 15:30:19.943152 4946 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cc2c7406-87e9-4da5-b99c-845bddf4a05b-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.099335 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" event={"ID":"cc2c7406-87e9-4da5-b99c-845bddf4a05b","Type":"ContainerDied","Data":"2219463c6ccb9d2ae116096fce378d5b66406476079ff2beaf1a064478487270"} Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.099631 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2219463c6ccb9d2ae116096fce378d5b66406476079ff2beaf1a064478487270" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.099354 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.168594 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd"] Dec 04 15:30:20 crc kubenswrapper[4946]: E1204 15:30:20.169033 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a29898d0-c1ba-435d-b43c-337236b65e84" containerName="collect-profiles" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.169047 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="a29898d0-c1ba-435d-b43c-337236b65e84" containerName="collect-profiles" Dec 04 15:30:20 crc kubenswrapper[4946]: E1204 15:30:20.169068 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc2c7406-87e9-4da5-b99c-845bddf4a05b" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.169075 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc2c7406-87e9-4da5-b99c-845bddf4a05b" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.169283 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc2c7406-87e9-4da5-b99c-845bddf4a05b" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.169314 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="a29898d0-c1ba-435d-b43c-337236b65e84" containerName="collect-profiles" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.170571 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.185383 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.185760 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bhtcv" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.185983 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.186179 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.235901 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd"] Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.350777 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb6cq\" (UniqueName: \"kubernetes.io/projected/34194ffb-2211-4d3b-820e-87e8008211a8-kube-api-access-lb6cq\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-h9gzd\" (UID: \"34194ffb-2211-4d3b-820e-87e8008211a8\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.350847 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34194ffb-2211-4d3b-820e-87e8008211a8-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-h9gzd\" (UID: \"34194ffb-2211-4d3b-820e-87e8008211a8\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.350946 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34194ffb-2211-4d3b-820e-87e8008211a8-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-h9gzd\" (UID: \"34194ffb-2211-4d3b-820e-87e8008211a8\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.452599 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34194ffb-2211-4d3b-820e-87e8008211a8-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-h9gzd\" (UID: \"34194ffb-2211-4d3b-820e-87e8008211a8\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.452741 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34194ffb-2211-4d3b-820e-87e8008211a8-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-h9gzd\" (UID: \"34194ffb-2211-4d3b-820e-87e8008211a8\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.452854 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lb6cq\" (UniqueName: \"kubernetes.io/projected/34194ffb-2211-4d3b-820e-87e8008211a8-kube-api-access-lb6cq\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-h9gzd\" (UID: \"34194ffb-2211-4d3b-820e-87e8008211a8\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.457335 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34194ffb-2211-4d3b-820e-87e8008211a8-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-h9gzd\" (UID: \"34194ffb-2211-4d3b-820e-87e8008211a8\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.459358 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34194ffb-2211-4d3b-820e-87e8008211a8-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-h9gzd\" (UID: \"34194ffb-2211-4d3b-820e-87e8008211a8\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.483202 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lb6cq\" (UniqueName: \"kubernetes.io/projected/34194ffb-2211-4d3b-820e-87e8008211a8-kube-api-access-lb6cq\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-h9gzd\" (UID: \"34194ffb-2211-4d3b-820e-87e8008211a8\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd" Dec 04 15:30:20 crc kubenswrapper[4946]: I1204 15:30:20.584632 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd" Dec 04 15:30:21 crc kubenswrapper[4946]: I1204 15:30:21.367475 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd"] Dec 04 15:30:22 crc kubenswrapper[4946]: I1204 15:30:22.128915 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd" event={"ID":"34194ffb-2211-4d3b-820e-87e8008211a8","Type":"ContainerStarted","Data":"790c2a513e34dd548de964f9cbe7b03b2752b52634596d30b850bf502e04336f"} Dec 04 15:30:23 crc kubenswrapper[4946]: I1204 15:30:23.148497 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd" event={"ID":"34194ffb-2211-4d3b-820e-87e8008211a8","Type":"ContainerStarted","Data":"b71b293656360de29f80fc23b70d98bd832728a1ade42601cc01a2c4f1fe84c2"} Dec 04 15:30:23 crc kubenswrapper[4946]: I1204 15:30:23.171661 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd" podStartSLOduration=2.672759574 podStartE2EDuration="3.17163952s" podCreationTimestamp="2025-12-04 15:30:20 +0000 UTC" firstStartedPulling="2025-12-04 15:30:21.380548628 +0000 UTC m=+1672.266592269" lastFinishedPulling="2025-12-04 15:30:21.879428574 +0000 UTC m=+1672.765472215" observedRunningTime="2025-12-04 15:30:23.165843273 +0000 UTC m=+1674.051886924" watchObservedRunningTime="2025-12-04 15:30:23.17163952 +0000 UTC m=+1674.057683171" Dec 04 15:30:24 crc kubenswrapper[4946]: I1204 15:30:24.675258 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 04 15:30:25 crc kubenswrapper[4946]: I1204 15:30:25.170364 4946 generic.go:334] "Generic (PLEG): container finished" podID="34194ffb-2211-4d3b-820e-87e8008211a8" containerID="b71b293656360de29f80fc23b70d98bd832728a1ade42601cc01a2c4f1fe84c2" exitCode=0 Dec 04 15:30:25 crc kubenswrapper[4946]: I1204 15:30:25.170426 4946 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd" event={"ID":"34194ffb-2211-4d3b-820e-87e8008211a8","Type":"ContainerDied","Data":"b71b293656360de29f80fc23b70d98bd832728a1ade42601cc01a2c4f1fe84c2"} Dec 04 15:30:26 crc kubenswrapper[4946]: I1204 15:30:26.716423 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd" Dec 04 15:30:26 crc kubenswrapper[4946]: I1204 15:30:26.842779 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lb6cq\" (UniqueName: \"kubernetes.io/projected/34194ffb-2211-4d3b-820e-87e8008211a8-kube-api-access-lb6cq\") pod \"34194ffb-2211-4d3b-820e-87e8008211a8\" (UID: \"34194ffb-2211-4d3b-820e-87e8008211a8\") " Dec 04 15:30:26 crc kubenswrapper[4946]: I1204 15:30:26.842867 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34194ffb-2211-4d3b-820e-87e8008211a8-inventory\") pod \"34194ffb-2211-4d3b-820e-87e8008211a8\" (UID: \"34194ffb-2211-4d3b-820e-87e8008211a8\") " Dec 04 15:30:26 crc kubenswrapper[4946]: I1204 15:30:26.843194 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34194ffb-2211-4d3b-820e-87e8008211a8-ssh-key\") pod \"34194ffb-2211-4d3b-820e-87e8008211a8\" (UID: \"34194ffb-2211-4d3b-820e-87e8008211a8\") " Dec 04 15:30:26 crc kubenswrapper[4946]: I1204 15:30:26.849489 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34194ffb-2211-4d3b-820e-87e8008211a8-kube-api-access-lb6cq" (OuterVolumeSpecName: "kube-api-access-lb6cq") pod "34194ffb-2211-4d3b-820e-87e8008211a8" (UID: "34194ffb-2211-4d3b-820e-87e8008211a8"). InnerVolumeSpecName "kube-api-access-lb6cq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:30:26 crc kubenswrapper[4946]: I1204 15:30:26.883470 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34194ffb-2211-4d3b-820e-87e8008211a8-inventory" (OuterVolumeSpecName: "inventory") pod "34194ffb-2211-4d3b-820e-87e8008211a8" (UID: "34194ffb-2211-4d3b-820e-87e8008211a8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:30:26 crc kubenswrapper[4946]: I1204 15:30:26.894352 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34194ffb-2211-4d3b-820e-87e8008211a8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "34194ffb-2211-4d3b-820e-87e8008211a8" (UID: "34194ffb-2211-4d3b-820e-87e8008211a8"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:30:26 crc kubenswrapper[4946]: I1204 15:30:26.945483 4946 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34194ffb-2211-4d3b-820e-87e8008211a8-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 15:30:26 crc kubenswrapper[4946]: I1204 15:30:26.945519 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lb6cq\" (UniqueName: \"kubernetes.io/projected/34194ffb-2211-4d3b-820e-87e8008211a8-kube-api-access-lb6cq\") on node \"crc\" DevicePath \"\"" Dec 04 15:30:26 crc kubenswrapper[4946]: I1204 15:30:26.945530 4946 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34194ffb-2211-4d3b-820e-87e8008211a8-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.194925 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd" event={"ID":"34194ffb-2211-4d3b-820e-87e8008211a8","Type":"ContainerDied","Data":"790c2a513e34dd548de964f9cbe7b03b2752b52634596d30b850bf502e04336f"} Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.194990 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="790c2a513e34dd548de964f9cbe7b03b2752b52634596d30b850bf502e04336f" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.195004 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-h9gzd" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.317318 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c"] Dec 04 15:30:27 crc kubenswrapper[4946]: E1204 15:30:27.318191 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34194ffb-2211-4d3b-820e-87e8008211a8" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.318223 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="34194ffb-2211-4d3b-820e-87e8008211a8" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.318607 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="34194ffb-2211-4d3b-820e-87e8008211a8" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.319970 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.322815 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.323130 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bhtcv" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.324304 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.325068 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.328317 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c"] Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.454753 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59863a34-23ab-44bb-be9a-dae51f8dd6c1-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c\" (UID: \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.454838 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/59863a34-23ab-44bb-be9a-dae51f8dd6c1-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c\" (UID: \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.454882 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9n9k9\" (UniqueName: \"kubernetes.io/projected/59863a34-23ab-44bb-be9a-dae51f8dd6c1-kube-api-access-9n9k9\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c\" (UID: \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.455092 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/59863a34-23ab-44bb-be9a-dae51f8dd6c1-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c\" (UID: \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.557359 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59863a34-23ab-44bb-be9a-dae51f8dd6c1-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c\" (UID: \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.557696 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/59863a34-23ab-44bb-be9a-dae51f8dd6c1-ssh-key\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c\" (UID: \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.557817 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9n9k9\" (UniqueName: \"kubernetes.io/projected/59863a34-23ab-44bb-be9a-dae51f8dd6c1-kube-api-access-9n9k9\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c\" (UID: \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.558032 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/59863a34-23ab-44bb-be9a-dae51f8dd6c1-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c\" (UID: \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.565005 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/59863a34-23ab-44bb-be9a-dae51f8dd6c1-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c\" (UID: \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.565299 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59863a34-23ab-44bb-be9a-dae51f8dd6c1-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c\" (UID: \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.566275 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/59863a34-23ab-44bb-be9a-dae51f8dd6c1-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c\" (UID: \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.582270 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9n9k9\" (UniqueName: \"kubernetes.io/projected/59863a34-23ab-44bb-be9a-dae51f8dd6c1-kube-api-access-9n9k9\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c\" (UID: \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" Dec 04 15:30:27 crc kubenswrapper[4946]: I1204 15:30:27.648653 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" Dec 04 15:30:28 crc kubenswrapper[4946]: I1204 15:30:28.429585 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c"] Dec 04 15:30:29 crc kubenswrapper[4946]: I1204 15:30:29.218548 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" event={"ID":"59863a34-23ab-44bb-be9a-dae51f8dd6c1","Type":"ContainerStarted","Data":"9f37d4e19b710672fb68c750987187c4c54a2965ecb83ce946ef447c50eff465"} Dec 04 15:30:29 crc kubenswrapper[4946]: I1204 15:30:29.218998 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" event={"ID":"59863a34-23ab-44bb-be9a-dae51f8dd6c1","Type":"ContainerStarted","Data":"e1a88dec8015322a06b4ccc04c005110406cdbe33e3da8886b33e464ee7e7141"} Dec 04 15:30:29 crc kubenswrapper[4946]: I1204 15:30:29.248105 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" podStartSLOduration=1.76628911 podStartE2EDuration="2.248082184s" podCreationTimestamp="2025-12-04 15:30:27 +0000 UTC" firstStartedPulling="2025-12-04 15:30:28.441770071 +0000 UTC m=+1679.327813712" lastFinishedPulling="2025-12-04 15:30:28.923563125 +0000 UTC m=+1679.809606786" observedRunningTime="2025-12-04 15:30:29.237411144 +0000 UTC m=+1680.123454785" watchObservedRunningTime="2025-12-04 15:30:29.248082184 +0000 UTC m=+1680.134125825" Dec 04 15:30:31 crc kubenswrapper[4946]: I1204 15:30:31.453793 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:30:31 crc kubenswrapper[4946]: E1204 15:30:31.454758 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:30:40 crc kubenswrapper[4946]: I1204 15:30:40.129586 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-api-0" Dec 04 15:30:42 crc kubenswrapper[4946]: I1204 15:30:42.101591 4946 scope.go:117] "RemoveContainer" containerID="cb124d636114954f01e8cce63ad55c49fbf55c8d5b57dc02efd225cb45a9b910" Dec 04 15:30:45 crc kubenswrapper[4946]: I1204 15:30:45.452995 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:30:45 crc kubenswrapper[4946]: E1204 15:30:45.456747 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:31:00 crc kubenswrapper[4946]: I1204 15:31:00.453181 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:31:00 crc kubenswrapper[4946]: E1204 15:31:00.454101 4946 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:31:15 crc kubenswrapper[4946]: I1204 15:31:15.452449 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:31:15 crc kubenswrapper[4946]: E1204 15:31:15.453287 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:31:29 crc kubenswrapper[4946]: I1204 15:31:29.462936 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:31:29 crc kubenswrapper[4946]: E1204 15:31:29.465794 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:31:42 crc kubenswrapper[4946]: I1204 15:31:42.249066 4946 scope.go:117] "RemoveContainer" containerID="b53922ed5de7d8d9049a3399f7197909ea2442d4593399fa4b1e9dea556ab77c" Dec 04 15:31:42 crc kubenswrapper[4946]: I1204 15:31:42.307312 4946 scope.go:117] "RemoveContainer" containerID="ce0faf47d27ebd0d8e26634dcc9ecd904c2dc788a92ec2518a8af894edcd4482" Dec 04 15:31:42 crc kubenswrapper[4946]: I1204 15:31:42.363994 4946 scope.go:117] "RemoveContainer" containerID="d73f51f2bfc21958963cbd4bd41d0d72350ef5cac03b0b891b42ebbcc75a7399" Dec 04 15:31:42 crc kubenswrapper[4946]: I1204 15:31:42.418651 4946 scope.go:117] "RemoveContainer" containerID="fa591cb12a326a0be71c8f464fcc92ed7afe15e10f7e6befabefbc25961efe9b" Dec 04 15:31:44 crc kubenswrapper[4946]: I1204 15:31:44.454307 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:31:44 crc kubenswrapper[4946]: E1204 15:31:44.456710 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:31:57 crc kubenswrapper[4946]: I1204 15:31:57.453732 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:31:57 crc kubenswrapper[4946]: E1204 15:31:57.454683 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:32:11 crc kubenswrapper[4946]: I1204 15:32:11.453235 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:32:11 crc kubenswrapper[4946]: E1204 15:32:11.454221 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:32:25 crc kubenswrapper[4946]: I1204 15:32:25.453837 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:32:25 crc kubenswrapper[4946]: E1204 15:32:25.454380 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:32:39 crc kubenswrapper[4946]: I1204 15:32:39.460983 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:32:39 crc kubenswrapper[4946]: E1204 15:32:39.462074 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:32:53 crc kubenswrapper[4946]: I1204 15:32:53.453688 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:32:53 crc kubenswrapper[4946]: E1204 15:32:53.454696 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:33:08 crc kubenswrapper[4946]: I1204 15:33:08.454303 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:33:08 crc kubenswrapper[4946]: E1204 15:33:08.455347 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:33:11 crc kubenswrapper[4946]: I1204 15:33:11.056093 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-mwbv8"] Dec 04 15:33:11 crc kubenswrapper[4946]: I1204 15:33:11.068584 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-3c1c-account-create-update-hv5bl"] Dec 04 15:33:11 crc kubenswrapper[4946]: I1204 15:33:11.080766 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-mwbv8"] Dec 04 15:33:11 crc kubenswrapper[4946]: I1204 15:33:11.094196 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-3c1c-account-create-update-hv5bl"] Dec 04 15:33:11 crc kubenswrapper[4946]: I1204 15:33:11.473544 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11b8d15d-d063-478d-8f4f-82d950f9aa2f" path="/var/lib/kubelet/pods/11b8d15d-d063-478d-8f4f-82d950f9aa2f/volumes" Dec 04 15:33:11 crc kubenswrapper[4946]: I1204 15:33:11.474813 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="513bc3b9-8ae8-4e8b-b02d-fb17f48f7921" path="/var/lib/kubelet/pods/513bc3b9-8ae8-4e8b-b02d-fb17f48f7921/volumes" Dec 04 15:33:14 crc kubenswrapper[4946]: I1204 15:33:14.045042 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-ggqwz"] Dec 04 15:33:14 crc kubenswrapper[4946]: I1204 15:33:14.059563 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-ggqwz"] Dec 04 15:33:15 crc kubenswrapper[4946]: I1204 15:33:15.039487 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-0373-account-create-update-dx68q"] Dec 04 15:33:15 crc kubenswrapper[4946]: I1204 15:33:15.052829 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-0353-account-create-update-cqjvb"] Dec 04 15:33:15 crc kubenswrapper[4946]: I1204 15:33:15.064792 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-7x4qr"] Dec 04 15:33:15 crc kubenswrapper[4946]: I1204 15:33:15.073829 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-0353-account-create-update-cqjvb"] Dec 04 15:33:15 crc kubenswrapper[4946]: I1204 15:33:15.084137 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-0373-account-create-update-dx68q"] Dec 04 15:33:15 crc kubenswrapper[4946]: I1204 15:33:15.093495 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-7x4qr"] Dec 04 15:33:15 crc kubenswrapper[4946]: I1204 15:33:15.467081 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14" path="/var/lib/kubelet/pods/2a3c0985-1155-4e0e-b3ab-d8ff63d8fe14/volumes" Dec 04 15:33:15 crc kubenswrapper[4946]: I1204 15:33:15.468640 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3825a065-e047-480e-a2a0-3aa2a1bdba24" path="/var/lib/kubelet/pods/3825a065-e047-480e-a2a0-3aa2a1bdba24/volumes" Dec 04 15:33:15 crc kubenswrapper[4946]: I1204 15:33:15.469772 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="464a3ed7-5fda-401e-973a-17f8f510a312" path="/var/lib/kubelet/pods/464a3ed7-5fda-401e-973a-17f8f510a312/volumes" Dec 04 15:33:15 crc kubenswrapper[4946]: I1204 15:33:15.470874 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="77ca1c4a-5e40-484a-ab87-c6cedf677c47" path="/var/lib/kubelet/pods/77ca1c4a-5e40-484a-ab87-c6cedf677c47/volumes" Dec 04 15:33:23 crc kubenswrapper[4946]: I1204 15:33:23.452984 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:33:23 crc kubenswrapper[4946]: E1204 15:33:23.453871 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:33:37 crc kubenswrapper[4946]: I1204 15:33:37.453346 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:33:37 crc kubenswrapper[4946]: E1204 15:33:37.454293 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:33:40 crc kubenswrapper[4946]: I1204 15:33:40.054591 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-create-wpggf"] Dec 04 15:33:40 crc kubenswrapper[4946]: I1204 15:33:40.064751 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-create-wpggf"] Dec 04 15:33:41 crc kubenswrapper[4946]: I1204 15:33:41.044469 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-c8d8-account-create-update-kpzbx"] Dec 04 15:33:41 crc kubenswrapper[4946]: I1204 15:33:41.056402 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-c72b-account-create-update-tc59c"] Dec 04 15:33:41 crc kubenswrapper[4946]: I1204 15:33:41.078822 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-c8d8-account-create-update-kpzbx"] Dec 04 15:33:41 crc kubenswrapper[4946]: I1204 15:33:41.089845 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-c72b-account-create-update-tc59c"] Dec 04 15:33:41 crc kubenswrapper[4946]: I1204 15:33:41.473078 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96a446b9-7b24-42fa-b6bb-99a22e323530" path="/var/lib/kubelet/pods/96a446b9-7b24-42fa-b6bb-99a22e323530/volumes" Dec 04 15:33:41 crc kubenswrapper[4946]: I1204 15:33:41.475564 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98ae66d2-d939-4351-bf32-f649f37df068" path="/var/lib/kubelet/pods/98ae66d2-d939-4351-bf32-f649f37df068/volumes" Dec 04 15:33:41 crc kubenswrapper[4946]: I1204 15:33:41.477479 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dbc4429-4eb9-4a7d-bd88-062fab6e1237" path="/var/lib/kubelet/pods/9dbc4429-4eb9-4a7d-bd88-062fab6e1237/volumes" Dec 04 15:33:42 crc kubenswrapper[4946]: I1204 15:33:42.590992 4946 scope.go:117] "RemoveContainer" containerID="d168994c47748309f8b562b57b2f3b75ec74d803184f5f374fad2fdcea780a02" Dec 04 15:33:42 crc kubenswrapper[4946]: I1204 15:33:42.624857 4946 scope.go:117] "RemoveContainer" 
containerID="3124cb27f05cf471281baf0b088ac5759fa62d47d5d5c6352f3b191f4ec491a4" Dec 04 15:33:42 crc kubenswrapper[4946]: I1204 15:33:42.656167 4946 scope.go:117] "RemoveContainer" containerID="681aaf0480a8cbf95e341377ca849c884d2988c38f2941f101dcb36df215c868" Dec 04 15:33:42 crc kubenswrapper[4946]: I1204 15:33:42.741818 4946 scope.go:117] "RemoveContainer" containerID="d0ad6217a4ac5111cc56d68909269f05cd5dcca0b234c56d2904891c04b09d39" Dec 04 15:33:42 crc kubenswrapper[4946]: I1204 15:33:42.791860 4946 scope.go:117] "RemoveContainer" containerID="00a7e538c3d0fe4c4dab760135e48f542dbbd468679d72b470d691d9ad41a628" Dec 04 15:33:42 crc kubenswrapper[4946]: I1204 15:33:42.851417 4946 scope.go:117] "RemoveContainer" containerID="9d713b888216b499c051e34e954e944be7f05ef6f85ca36968fe31de92d5cd0b" Dec 04 15:33:42 crc kubenswrapper[4946]: I1204 15:33:42.905358 4946 scope.go:117] "RemoveContainer" containerID="1230aca8e177488b97696cdc7756cbb89d937bccc36045b94960d075815c3205" Dec 04 15:33:42 crc kubenswrapper[4946]: I1204 15:33:42.963373 4946 scope.go:117] "RemoveContainer" containerID="f19688e673391624288c47de1f217dd8e9396af4280d16ba125328710436b3fc" Dec 04 15:33:43 crc kubenswrapper[4946]: I1204 15:33:43.003318 4946 scope.go:117] "RemoveContainer" containerID="b639cc5eadcc71a9941089bbca478789ca9c2e9958b640434f64f96571b0470b" Dec 04 15:33:43 crc kubenswrapper[4946]: I1204 15:33:43.030257 4946 scope.go:117] "RemoveContainer" containerID="544ba986150b8f5a299b10cc7d15631439d7454fbb17bc8815f57ec59a33edfd" Dec 04 15:33:43 crc kubenswrapper[4946]: I1204 15:33:43.058940 4946 scope.go:117] "RemoveContainer" containerID="c353be8ba5469f93b16fd811a10bb3b69e2f449da844c5a4ce804eed88af8553" Dec 04 15:33:43 crc kubenswrapper[4946]: I1204 15:33:43.093964 4946 scope.go:117] "RemoveContainer" containerID="4cf5db641d4bd34f638dec88f3a1345b8500314424bef26f383a7b145e33bdb6" Dec 04 15:33:44 crc kubenswrapper[4946]: I1204 15:33:44.035662 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-2e71-account-create-update-9w6zw"] Dec 04 15:33:44 crc kubenswrapper[4946]: I1204 15:33:44.049777 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-qv6jf"] Dec 04 15:33:44 crc kubenswrapper[4946]: I1204 15:33:44.058915 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-2e71-account-create-update-9w6zw"] Dec 04 15:33:44 crc kubenswrapper[4946]: I1204 15:33:44.067261 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-623f-account-create-update-hlk8q"] Dec 04 15:33:44 crc kubenswrapper[4946]: I1204 15:33:44.076096 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-qv6jf"] Dec 04 15:33:44 crc kubenswrapper[4946]: I1204 15:33:44.084693 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-4w6dl"] Dec 04 15:33:44 crc kubenswrapper[4946]: I1204 15:33:44.092780 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-sf24r"] Dec 04 15:33:44 crc kubenswrapper[4946]: I1204 15:33:44.100666 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-4w6dl"] Dec 04 15:33:44 crc kubenswrapper[4946]: I1204 15:33:44.108679 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-623f-account-create-update-hlk8q"] Dec 04 15:33:44 crc kubenswrapper[4946]: I1204 15:33:44.117127 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-sf24r"] Dec 04 15:33:45 crc 
kubenswrapper[4946]: I1204 15:33:45.470741 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35e0e7f6-eb97-4c70-8970-c9686b1579b7" path="/var/lib/kubelet/pods/35e0e7f6-eb97-4c70-8970-c9686b1579b7/volumes" Dec 04 15:33:45 crc kubenswrapper[4946]: I1204 15:33:45.550212 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c809ac2-b396-40ae-ac9c-0eb18befdf08" path="/var/lib/kubelet/pods/5c809ac2-b396-40ae-ac9c-0eb18befdf08/volumes" Dec 04 15:33:45 crc kubenswrapper[4946]: I1204 15:33:45.550956 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77f8c12b-a81e-4b7a-b153-17f4320daeb5" path="/var/lib/kubelet/pods/77f8c12b-a81e-4b7a-b153-17f4320daeb5/volumes" Dec 04 15:33:45 crc kubenswrapper[4946]: I1204 15:33:45.552003 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99713bb8-2c08-402d-ba82-45e2e64ef670" path="/var/lib/kubelet/pods/99713bb8-2c08-402d-ba82-45e2e64ef670/volumes" Dec 04 15:33:45 crc kubenswrapper[4946]: I1204 15:33:45.553589 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e935b12d-7145-47e5-b691-57bd2c9f1fac" path="/var/lib/kubelet/pods/e935b12d-7145-47e5-b691-57bd2c9f1fac/volumes" Dec 04 15:33:50 crc kubenswrapper[4946]: I1204 15:33:50.454951 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:33:50 crc kubenswrapper[4946]: E1204 15:33:50.455904 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:33:53 crc kubenswrapper[4946]: I1204 15:33:53.035490 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-gzkww"] Dec 04 15:33:53 crc kubenswrapper[4946]: I1204 15:33:53.044328 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-gzkww"] Dec 04 15:33:53 crc kubenswrapper[4946]: I1204 15:33:53.467505 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f646177-69a6-42cf-9d49-8be8541c58c1" path="/var/lib/kubelet/pods/4f646177-69a6-42cf-9d49-8be8541c58c1/volumes" Dec 04 15:34:01 crc kubenswrapper[4946]: I1204 15:34:01.453838 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:34:01 crc kubenswrapper[4946]: E1204 15:34:01.455376 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:34:13 crc kubenswrapper[4946]: I1204 15:34:13.190103 4946 generic.go:334] "Generic (PLEG): container finished" podID="59863a34-23ab-44bb-be9a-dae51f8dd6c1" containerID="9f37d4e19b710672fb68c750987187c4c54a2965ecb83ce946ef447c50eff465" exitCode=0 Dec 04 15:34:13 crc kubenswrapper[4946]: I1204 15:34:13.190224 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" event={"ID":"59863a34-23ab-44bb-be9a-dae51f8dd6c1","Type":"ContainerDied","Data":"9f37d4e19b710672fb68c750987187c4c54a2965ecb83ce946ef447c50eff465"} Dec 04 15:34:14 crc kubenswrapper[4946]: I1204 15:34:14.804025 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" Dec 04 15:34:14 crc kubenswrapper[4946]: I1204 15:34:14.899042 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59863a34-23ab-44bb-be9a-dae51f8dd6c1-bootstrap-combined-ca-bundle\") pod \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\" (UID: \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\") " Dec 04 15:34:14 crc kubenswrapper[4946]: I1204 15:34:14.899442 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/59863a34-23ab-44bb-be9a-dae51f8dd6c1-inventory\") pod \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\" (UID: \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\") " Dec 04 15:34:14 crc kubenswrapper[4946]: I1204 15:34:14.899588 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/59863a34-23ab-44bb-be9a-dae51f8dd6c1-ssh-key\") pod \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\" (UID: \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\") " Dec 04 15:34:14 crc kubenswrapper[4946]: I1204 15:34:14.899696 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9n9k9\" (UniqueName: \"kubernetes.io/projected/59863a34-23ab-44bb-be9a-dae51f8dd6c1-kube-api-access-9n9k9\") pod \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\" (UID: \"59863a34-23ab-44bb-be9a-dae51f8dd6c1\") " Dec 04 15:34:14 crc kubenswrapper[4946]: I1204 15:34:14.909444 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59863a34-23ab-44bb-be9a-dae51f8dd6c1-kube-api-access-9n9k9" (OuterVolumeSpecName: "kube-api-access-9n9k9") pod "59863a34-23ab-44bb-be9a-dae51f8dd6c1" (UID: "59863a34-23ab-44bb-be9a-dae51f8dd6c1"). InnerVolumeSpecName "kube-api-access-9n9k9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:34:14 crc kubenswrapper[4946]: I1204 15:34:14.909601 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59863a34-23ab-44bb-be9a-dae51f8dd6c1-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "59863a34-23ab-44bb-be9a-dae51f8dd6c1" (UID: "59863a34-23ab-44bb-be9a-dae51f8dd6c1"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:34:14 crc kubenswrapper[4946]: I1204 15:34:14.941822 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59863a34-23ab-44bb-be9a-dae51f8dd6c1-inventory" (OuterVolumeSpecName: "inventory") pod "59863a34-23ab-44bb-be9a-dae51f8dd6c1" (UID: "59863a34-23ab-44bb-be9a-dae51f8dd6c1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:34:14 crc kubenswrapper[4946]: I1204 15:34:14.949882 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59863a34-23ab-44bb-be9a-dae51f8dd6c1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "59863a34-23ab-44bb-be9a-dae51f8dd6c1" (UID: "59863a34-23ab-44bb-be9a-dae51f8dd6c1"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.002166 4946 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/59863a34-23ab-44bb-be9a-dae51f8dd6c1-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.002215 4946 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/59863a34-23ab-44bb-be9a-dae51f8dd6c1-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.002230 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9n9k9\" (UniqueName: \"kubernetes.io/projected/59863a34-23ab-44bb-be9a-dae51f8dd6c1-kube-api-access-9n9k9\") on node \"crc\" DevicePath \"\"" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.002249 4946 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59863a34-23ab-44bb-be9a-dae51f8dd6c1-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.218151 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" event={"ID":"59863a34-23ab-44bb-be9a-dae51f8dd6c1","Type":"ContainerDied","Data":"e1a88dec8015322a06b4ccc04c005110406cdbe33e3da8886b33e464ee7e7141"} Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.218211 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1a88dec8015322a06b4ccc04c005110406cdbe33e3da8886b33e464ee7e7141" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.218243 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.315538 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67"] Dec 04 15:34:15 crc kubenswrapper[4946]: E1204 15:34:15.316025 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59863a34-23ab-44bb-be9a-dae51f8dd6c1" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.316042 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="59863a34-23ab-44bb-be9a-dae51f8dd6c1" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.322943 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="59863a34-23ab-44bb-be9a-dae51f8dd6c1" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.323838 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.326389 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.326898 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.326916 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bhtcv" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.327432 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.331838 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67"] Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.412469 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-h6x67\" (UID: \"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.412836 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lv97\" (UniqueName: \"kubernetes.io/projected/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-kube-api-access-8lv97\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-h6x67\" (UID: \"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.413144 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-h6x67\" (UID: \"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.452986 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:34:15 crc kubenswrapper[4946]: E1204 15:34:15.453479 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.515661 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-h6x67\" (UID: \"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.515731 
4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lv97\" (UniqueName: \"kubernetes.io/projected/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-kube-api-access-8lv97\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-h6x67\" (UID: \"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.515948 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-h6x67\" (UID: \"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.522720 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-h6x67\" (UID: \"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.523972 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-h6x67\" (UID: \"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.547006 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lv97\" (UniqueName: \"kubernetes.io/projected/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-kube-api-access-8lv97\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-h6x67\" (UID: \"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67" Dec 04 15:34:15 crc kubenswrapper[4946]: I1204 15:34:15.647317 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67" Dec 04 15:34:16 crc kubenswrapper[4946]: I1204 15:34:16.295918 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67"] Dec 04 15:34:17 crc kubenswrapper[4946]: I1204 15:34:17.252144 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67" event={"ID":"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c","Type":"ContainerStarted","Data":"0d26ef2e1454c2589d64a581f8375a335018c4487afa40e9ba3fd4522c2c473b"} Dec 04 15:34:17 crc kubenswrapper[4946]: I1204 15:34:17.252635 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67" event={"ID":"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c","Type":"ContainerStarted","Data":"f9758473d938fc11f89ebba3242aa6dfe74cf293db34cb8f45e2fd1556ddfa06"} Dec 04 15:34:17 crc kubenswrapper[4946]: I1204 15:34:17.276458 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67" podStartSLOduration=1.851547269 podStartE2EDuration="2.276438673s" podCreationTimestamp="2025-12-04 15:34:15 +0000 UTC" firstStartedPulling="2025-12-04 15:34:16.274533472 +0000 UTC m=+1907.160577113" lastFinishedPulling="2025-12-04 15:34:16.699424886 +0000 UTC m=+1907.585468517" observedRunningTime="2025-12-04 15:34:17.27032415 +0000 UTC m=+1908.156367811" watchObservedRunningTime="2025-12-04 15:34:17.276438673 +0000 UTC m=+1908.162482324" Dec 04 15:34:19 crc kubenswrapper[4946]: I1204 15:34:19.056305 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-tb5mp"] Dec 04 15:34:19 crc kubenswrapper[4946]: I1204 15:34:19.070736 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-tb5mp"] Dec 04 15:34:19 crc kubenswrapper[4946]: I1204 15:34:19.468702 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a4e74d1-f18d-4356-be6d-10171056d511" path="/var/lib/kubelet/pods/2a4e74d1-f18d-4356-be6d-10171056d511/volumes" Dec 04 15:34:26 crc kubenswrapper[4946]: I1204 15:34:26.454777 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:34:26 crc kubenswrapper[4946]: E1204 15:34:26.456417 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:34:32 crc kubenswrapper[4946]: I1204 15:34:32.040473 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-x2xgf"] Dec 04 15:34:32 crc kubenswrapper[4946]: I1204 15:34:32.054453 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-x2xgf"] Dec 04 15:34:33 crc kubenswrapper[4946]: I1204 15:34:33.467628 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53776995-5c2b-44a6-bbd2-ff624640c0b3" path="/var/lib/kubelet/pods/53776995-5c2b-44a6-bbd2-ff624640c0b3/volumes" Dec 04 15:34:41 crc kubenswrapper[4946]: I1204 15:34:41.452853 4946 scope.go:117] "RemoveContainer" 
containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:34:41 crc kubenswrapper[4946]: E1204 15:34:41.453981 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:34:43 crc kubenswrapper[4946]: I1204 15:34:43.361077 4946 scope.go:117] "RemoveContainer" containerID="1050643e71f1a3316be0926b85a23c686c56b61696ff4f0e51e7469646e8ee85" Dec 04 15:34:43 crc kubenswrapper[4946]: I1204 15:34:43.400812 4946 scope.go:117] "RemoveContainer" containerID="fb873cdca7ad35413129ab3ef76d927e7a73f30b001c4770a1d791971e331b12" Dec 04 15:34:43 crc kubenswrapper[4946]: I1204 15:34:43.494661 4946 scope.go:117] "RemoveContainer" containerID="4021bee0f4d236cd7ba8f99489e15e19d390b37b0bd48940f3d6f57707ac0b57" Dec 04 15:34:43 crc kubenswrapper[4946]: I1204 15:34:43.547255 4946 scope.go:117] "RemoveContainer" containerID="9b259f85f0124a2d4d84c3ed3d78d2b72042e0ec09467385688507572f9b0f22" Dec 04 15:34:43 crc kubenswrapper[4946]: I1204 15:34:43.582106 4946 scope.go:117] "RemoveContainer" containerID="4274ff774468bb4c61f9e42263c8acee46be3eb6297d25aab29a2fd62cacd8b9" Dec 04 15:34:43 crc kubenswrapper[4946]: I1204 15:34:43.641366 4946 scope.go:117] "RemoveContainer" containerID="9cb67e95c5fbc2a0dd7e351697d2b13f8759c6be87c1162677ac584a152961e1" Dec 04 15:34:43 crc kubenswrapper[4946]: I1204 15:34:43.678523 4946 scope.go:117] "RemoveContainer" containerID="cef4183916354fd644469fc88df61c2c5b305125508468352a41d6b663967ffd" Dec 04 15:34:43 crc kubenswrapper[4946]: I1204 15:34:43.719605 4946 scope.go:117] "RemoveContainer" containerID="ffead12db69d190a6075fb361de3c807760336976d12001249eb4e47eeba1fe5" Dec 04 15:34:54 crc kubenswrapper[4946]: I1204 15:34:54.453850 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:34:55 crc kubenswrapper[4946]: I1204 15:34:55.737144 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"f779b15ef6675c268d0553f67a4341f7aaa97f77eb86eee0a5fcf482005b8efc"} Dec 04 15:34:59 crc kubenswrapper[4946]: I1204 15:34:59.082138 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-fzhsp"] Dec 04 15:34:59 crc kubenswrapper[4946]: I1204 15:34:59.098109 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-q4qx6"] Dec 04 15:34:59 crc kubenswrapper[4946]: I1204 15:34:59.111624 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-fzhsp"] Dec 04 15:34:59 crc kubenswrapper[4946]: I1204 15:34:59.124803 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-q4qx6"] Dec 04 15:34:59 crc kubenswrapper[4946]: I1204 15:34:59.471326 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79b3e881-2a15-43cf-aefa-b0b4dc1f5935" path="/var/lib/kubelet/pods/79b3e881-2a15-43cf-aefa-b0b4dc1f5935/volumes" Dec 04 15:34:59 crc kubenswrapper[4946]: I1204 15:34:59.472509 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="c94e50af-9ae2-4ed6-a351-ccff8209cd55" path="/var/lib/kubelet/pods/c94e50af-9ae2-4ed6-a351-ccff8209cd55/volumes" Dec 04 15:35:01 crc kubenswrapper[4946]: I1204 15:35:01.049509 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-tvf2t"] Dec 04 15:35:01 crc kubenswrapper[4946]: I1204 15:35:01.069292 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-tvf2t"] Dec 04 15:35:01 crc kubenswrapper[4946]: I1204 15:35:01.465029 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a2d2577-fc5e-4375-8c8f-154aa218707f" path="/var/lib/kubelet/pods/7a2d2577-fc5e-4375-8c8f-154aa218707f/volumes" Dec 04 15:35:03 crc kubenswrapper[4946]: I1204 15:35:03.044373 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-cqwq4"] Dec 04 15:35:03 crc kubenswrapper[4946]: I1204 15:35:03.058825 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-cqwq4"] Dec 04 15:35:03 crc kubenswrapper[4946]: I1204 15:35:03.471933 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fe7f895-e33e-4159-9dcd-689158d16f22" path="/var/lib/kubelet/pods/8fe7f895-e33e-4159-9dcd-689158d16f22/volumes" Dec 04 15:35:43 crc kubenswrapper[4946]: I1204 15:35:43.921409 4946 scope.go:117] "RemoveContainer" containerID="03bc0e32c860c9d532acd53ed6cb6845f89746360357f564ecf12f8a29009193" Dec 04 15:35:44 crc kubenswrapper[4946]: I1204 15:35:44.010679 4946 scope.go:117] "RemoveContainer" containerID="92d9c0475208571121aca1c206c5f8e190ab1b720f04730a7c8f3d0143b3de7b" Dec 04 15:35:44 crc kubenswrapper[4946]: I1204 15:35:44.064325 4946 scope.go:117] "RemoveContainer" containerID="4a584f8c85227b7e4909b4169e6dd4224d00797c8adccb9ed321bf793d24a3f8" Dec 04 15:35:44 crc kubenswrapper[4946]: I1204 15:35:44.123622 4946 scope.go:117] "RemoveContainer" containerID="69d68757db23ce226565173643b4ba8f92219d161e8d4837acffdeb9b1a1ed47" Dec 04 15:36:10 crc kubenswrapper[4946]: I1204 15:36:10.060365 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-hckh5"] Dec 04 15:36:10 crc kubenswrapper[4946]: I1204 15:36:10.072977 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-hckh5"] Dec 04 15:36:10 crc kubenswrapper[4946]: I1204 15:36:10.084660 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-xcjh9"] Dec 04 15:36:10 crc kubenswrapper[4946]: I1204 15:36:10.098362 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-x5rz8"] Dec 04 15:36:10 crc kubenswrapper[4946]: I1204 15:36:10.134711 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-c9a7-account-create-update-ppsl7"] Dec 04 15:36:10 crc kubenswrapper[4946]: I1204 15:36:10.149022 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-x5rz8"] Dec 04 15:36:10 crc kubenswrapper[4946]: I1204 15:36:10.163381 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-xcjh9"] Dec 04 15:36:10 crc kubenswrapper[4946]: I1204 15:36:10.177201 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-345d-account-create-update-zlp2s"] Dec 04 15:36:10 crc kubenswrapper[4946]: I1204 15:36:10.187529 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-c9a7-account-create-update-ppsl7"] Dec 04 15:36:10 crc kubenswrapper[4946]: I1204 15:36:10.196318 4946 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-345d-account-create-update-zlp2s"] Dec 04 15:36:10 crc kubenswrapper[4946]: I1204 15:36:10.883454 4946 generic.go:334] "Generic (PLEG): container finished" podID="707e8d7d-0e5d-4e4c-ab78-9a4745449b8c" containerID="0d26ef2e1454c2589d64a581f8375a335018c4487afa40e9ba3fd4522c2c473b" exitCode=0 Dec 04 15:36:10 crc kubenswrapper[4946]: I1204 15:36:10.883541 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67" event={"ID":"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c","Type":"ContainerDied","Data":"0d26ef2e1454c2589d64a581f8375a335018c4487afa40e9ba3fd4522c2c473b"} Dec 04 15:36:11 crc kubenswrapper[4946]: I1204 15:36:11.047225 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-2d75-account-create-update-dr4ts"] Dec 04 15:36:11 crc kubenswrapper[4946]: I1204 15:36:11.062052 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-2d75-account-create-update-dr4ts"] Dec 04 15:36:11 crc kubenswrapper[4946]: I1204 15:36:11.470367 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6" path="/var/lib/kubelet/pods/4c4043ce-e2d2-4bba-b8d6-ef8cf0343ef6/volumes" Dec 04 15:36:11 crc kubenswrapper[4946]: I1204 15:36:11.472076 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68d63a4d-c7d0-4978-915e-b538b2ed82b1" path="/var/lib/kubelet/pods/68d63a4d-c7d0-4978-915e-b538b2ed82b1/volumes" Dec 04 15:36:11 crc kubenswrapper[4946]: I1204 15:36:11.472915 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86953713-6e7a-4cf7-8817-45785be4930f" path="/var/lib/kubelet/pods/86953713-6e7a-4cf7-8817-45785be4930f/volumes" Dec 04 15:36:11 crc kubenswrapper[4946]: I1204 15:36:11.473714 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9860ec84-49a2-4eb0-8706-3ae5c1673add" path="/var/lib/kubelet/pods/9860ec84-49a2-4eb0-8706-3ae5c1673add/volumes" Dec 04 15:36:11 crc kubenswrapper[4946]: I1204 15:36:11.475388 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2ec6f3e-22c6-496a-b23b-e0d493032eb6" path="/var/lib/kubelet/pods/a2ec6f3e-22c6-496a-b23b-e0d493032eb6/volumes" Dec 04 15:36:11 crc kubenswrapper[4946]: I1204 15:36:11.476191 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb" path="/var/lib/kubelet/pods/c79187df-6c7e-43b5-9b13-cc8ac5a9bbcb/volumes" Dec 04 15:36:12 crc kubenswrapper[4946]: I1204 15:36:12.522169 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67" Dec 04 15:36:12 crc kubenswrapper[4946]: I1204 15:36:12.642260 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-inventory\") pod \"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c\" (UID: \"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c\") " Dec 04 15:36:12 crc kubenswrapper[4946]: I1204 15:36:12.642354 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-ssh-key\") pod \"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c\" (UID: \"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c\") " Dec 04 15:36:12 crc kubenswrapper[4946]: I1204 15:36:12.642818 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lv97\" (UniqueName: \"kubernetes.io/projected/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-kube-api-access-8lv97\") pod \"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c\" (UID: \"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c\") " Dec 04 15:36:12 crc kubenswrapper[4946]: I1204 15:36:12.650693 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-kube-api-access-8lv97" (OuterVolumeSpecName: "kube-api-access-8lv97") pod "707e8d7d-0e5d-4e4c-ab78-9a4745449b8c" (UID: "707e8d7d-0e5d-4e4c-ab78-9a4745449b8c"). InnerVolumeSpecName "kube-api-access-8lv97". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:36:12 crc kubenswrapper[4946]: E1204 15:36:12.696571 4946 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-ssh-key podName:707e8d7d-0e5d-4e4c-ab78-9a4745449b8c nodeName:}" failed. No retries permitted until 2025-12-04 15:36:13.196525395 +0000 UTC m=+2024.082569036 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ssh-key" (UniqueName: "kubernetes.io/secret/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-ssh-key") pod "707e8d7d-0e5d-4e4c-ab78-9a4745449b8c" (UID: "707e8d7d-0e5d-4e4c-ab78-9a4745449b8c") : error deleting /var/lib/kubelet/pods/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c/volume-subpaths: remove /var/lib/kubelet/pods/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c/volume-subpaths: no such file or directory Dec 04 15:36:12 crc kubenswrapper[4946]: I1204 15:36:12.707273 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-inventory" (OuterVolumeSpecName: "inventory") pod "707e8d7d-0e5d-4e4c-ab78-9a4745449b8c" (UID: "707e8d7d-0e5d-4e4c-ab78-9a4745449b8c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:36:12 crc kubenswrapper[4946]: I1204 15:36:12.748635 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lv97\" (UniqueName: \"kubernetes.io/projected/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-kube-api-access-8lv97\") on node \"crc\" DevicePath \"\"" Dec 04 15:36:12 crc kubenswrapper[4946]: I1204 15:36:12.748680 4946 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 15:36:12 crc kubenswrapper[4946]: I1204 15:36:12.920636 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67" event={"ID":"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c","Type":"ContainerDied","Data":"f9758473d938fc11f89ebba3242aa6dfe74cf293db34cb8f45e2fd1556ddfa06"} Dec 04 15:36:12 crc kubenswrapper[4946]: I1204 15:36:12.920719 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9758473d938fc11f89ebba3242aa6dfe74cf293db34cb8f45e2fd1556ddfa06" Dec 04 15:36:12 crc kubenswrapper[4946]: I1204 15:36:12.921265 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h6x67" Dec 04 15:36:13 crc kubenswrapper[4946]: I1204 15:36:13.052322 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm"] Dec 04 15:36:13 crc kubenswrapper[4946]: E1204 15:36:13.053527 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="707e8d7d-0e5d-4e4c-ab78-9a4745449b8c" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 04 15:36:13 crc kubenswrapper[4946]: I1204 15:36:13.053551 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="707e8d7d-0e5d-4e4c-ab78-9a4745449b8c" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 04 15:36:13 crc kubenswrapper[4946]: I1204 15:36:13.053851 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="707e8d7d-0e5d-4e4c-ab78-9a4745449b8c" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 04 15:36:13 crc kubenswrapper[4946]: I1204 15:36:13.055101 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm" Dec 04 15:36:13 crc kubenswrapper[4946]: I1204 15:36:13.073024 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm"] Dec 04 15:36:13 crc kubenswrapper[4946]: I1204 15:36:13.162242 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2eb924b-02a3-41e8-b820-0a89c1420ebc-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm\" (UID: \"d2eb924b-02a3-41e8-b820-0a89c1420ebc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm" Dec 04 15:36:13 crc kubenswrapper[4946]: I1204 15:36:13.162315 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2eb924b-02a3-41e8-b820-0a89c1420ebc-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm\" (UID: \"d2eb924b-02a3-41e8-b820-0a89c1420ebc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm" Dec 04 15:36:13 crc kubenswrapper[4946]: I1204 15:36:13.162522 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqpkm\" (UniqueName: \"kubernetes.io/projected/d2eb924b-02a3-41e8-b820-0a89c1420ebc-kube-api-access-gqpkm\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm\" (UID: \"d2eb924b-02a3-41e8-b820-0a89c1420ebc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm" Dec 04 15:36:13 crc kubenswrapper[4946]: I1204 15:36:13.263868 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-ssh-key\") pod \"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c\" (UID: \"707e8d7d-0e5d-4e4c-ab78-9a4745449b8c\") " Dec 04 15:36:13 crc kubenswrapper[4946]: I1204 15:36:13.264635 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2eb924b-02a3-41e8-b820-0a89c1420ebc-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm\" (UID: \"d2eb924b-02a3-41e8-b820-0a89c1420ebc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm" Dec 04 15:36:13 crc kubenswrapper[4946]: I1204 15:36:13.264815 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqpkm\" (UniqueName: \"kubernetes.io/projected/d2eb924b-02a3-41e8-b820-0a89c1420ebc-kube-api-access-gqpkm\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm\" (UID: \"d2eb924b-02a3-41e8-b820-0a89c1420ebc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm" Dec 04 15:36:13 crc kubenswrapper[4946]: I1204 15:36:13.264879 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2eb924b-02a3-41e8-b820-0a89c1420ebc-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm\" (UID: \"d2eb924b-02a3-41e8-b820-0a89c1420ebc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm" Dec 04 15:36:13 crc kubenswrapper[4946]: I1204 15:36:13.268876 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "707e8d7d-0e5d-4e4c-ab78-9a4745449b8c" (UID: "707e8d7d-0e5d-4e4c-ab78-9a4745449b8c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:36:13 crc kubenswrapper[4946]: I1204 15:36:13.280278 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2eb924b-02a3-41e8-b820-0a89c1420ebc-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm\" (UID: \"d2eb924b-02a3-41e8-b820-0a89c1420ebc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm" Dec 04 15:36:13 crc kubenswrapper[4946]: I1204 15:36:13.280791 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2eb924b-02a3-41e8-b820-0a89c1420ebc-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm\" (UID: \"d2eb924b-02a3-41e8-b820-0a89c1420ebc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm" Dec 04 15:36:13 crc kubenswrapper[4946]: I1204 15:36:13.285778 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqpkm\" (UniqueName: \"kubernetes.io/projected/d2eb924b-02a3-41e8-b820-0a89c1420ebc-kube-api-access-gqpkm\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm\" (UID: \"d2eb924b-02a3-41e8-b820-0a89c1420ebc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm" Dec 04 15:36:13 crc kubenswrapper[4946]: I1204 15:36:13.366625 4946 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/707e8d7d-0e5d-4e4c-ab78-9a4745449b8c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 15:36:13 crc kubenswrapper[4946]: I1204 15:36:13.395647 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm" Dec 04 15:36:14 crc kubenswrapper[4946]: I1204 15:36:14.098663 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm"] Dec 04 15:36:14 crc kubenswrapper[4946]: I1204 15:36:14.106979 4946 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 15:36:14 crc kubenswrapper[4946]: I1204 15:36:14.950375 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm" event={"ID":"d2eb924b-02a3-41e8-b820-0a89c1420ebc","Type":"ContainerStarted","Data":"3d5561d10a173c087439a8227f381a5ef89f76c6630f78e007be4a4b92f03fed"} Dec 04 15:36:14 crc kubenswrapper[4946]: I1204 15:36:14.951207 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm" event={"ID":"d2eb924b-02a3-41e8-b820-0a89c1420ebc","Type":"ContainerStarted","Data":"c79e14608ca398a3d025df43a6485d43544e785a3a95fac621c7006ea1125243"} Dec 04 15:36:14 crc kubenswrapper[4946]: I1204 15:36:14.989054 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm" podStartSLOduration=1.5359246 podStartE2EDuration="1.989026133s" podCreationTimestamp="2025-12-04 15:36:13 +0000 UTC" firstStartedPulling="2025-12-04 15:36:14.106471028 +0000 UTC m=+2024.992514679" lastFinishedPulling="2025-12-04 15:36:14.559572571 +0000 UTC m=+2025.445616212" observedRunningTime="2025-12-04 15:36:14.977721681 +0000 UTC m=+2025.863765322" watchObservedRunningTime="2025-12-04 15:36:14.989026133 +0000 UTC m=+2025.875069784" Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.007366 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fzc8f"] Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.013175 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fzc8f" Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.024541 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fzc8f"] Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.098141 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0482ed7-b860-4399-b274-85f665a03afa-catalog-content\") pod \"redhat-operators-fzc8f\" (UID: \"a0482ed7-b860-4399-b274-85f665a03afa\") " pod="openshift-marketplace/redhat-operators-fzc8f" Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.098262 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0482ed7-b860-4399-b274-85f665a03afa-utilities\") pod \"redhat-operators-fzc8f\" (UID: \"a0482ed7-b860-4399-b274-85f665a03afa\") " pod="openshift-marketplace/redhat-operators-fzc8f" Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.098765 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vs7l5\" (UniqueName: \"kubernetes.io/projected/a0482ed7-b860-4399-b274-85f665a03afa-kube-api-access-vs7l5\") pod \"redhat-operators-fzc8f\" (UID: \"a0482ed7-b860-4399-b274-85f665a03afa\") " pod="openshift-marketplace/redhat-operators-fzc8f" Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.202606 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vs7l5\" (UniqueName: \"kubernetes.io/projected/a0482ed7-b860-4399-b274-85f665a03afa-kube-api-access-vs7l5\") pod \"redhat-operators-fzc8f\" (UID: \"a0482ed7-b860-4399-b274-85f665a03afa\") " pod="openshift-marketplace/redhat-operators-fzc8f" Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.202790 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0482ed7-b860-4399-b274-85f665a03afa-catalog-content\") pod \"redhat-operators-fzc8f\" (UID: \"a0482ed7-b860-4399-b274-85f665a03afa\") " pod="openshift-marketplace/redhat-operators-fzc8f" Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.202852 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0482ed7-b860-4399-b274-85f665a03afa-utilities\") pod \"redhat-operators-fzc8f\" (UID: \"a0482ed7-b860-4399-b274-85f665a03afa\") " pod="openshift-marketplace/redhat-operators-fzc8f" Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.203712 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0482ed7-b860-4399-b274-85f665a03afa-utilities\") pod \"redhat-operators-fzc8f\" (UID: \"a0482ed7-b860-4399-b274-85f665a03afa\") " pod="openshift-marketplace/redhat-operators-fzc8f" Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.204052 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0482ed7-b860-4399-b274-85f665a03afa-catalog-content\") pod \"redhat-operators-fzc8f\" (UID: \"a0482ed7-b860-4399-b274-85f665a03afa\") " pod="openshift-marketplace/redhat-operators-fzc8f" Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.232542 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-vs7l5\" (UniqueName: \"kubernetes.io/projected/a0482ed7-b860-4399-b274-85f665a03afa-kube-api-access-vs7l5\") pod \"redhat-operators-fzc8f\" (UID: \"a0482ed7-b860-4399-b274-85f665a03afa\") " pod="openshift-marketplace/redhat-operators-fzc8f" Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.303765 4946 scope.go:117] "RemoveContainer" containerID="267ffbe080c11d25c74b3bf9a7e1e5a4006d14e666ed12a6eac29d148f294126" Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.343826 4946 scope.go:117] "RemoveContainer" containerID="a40838ee8e3ee2ded2de941fa076df2821b8ae4ed75c782d9d6730c4733932cc" Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.359784 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fzc8f" Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.407795 4946 scope.go:117] "RemoveContainer" containerID="e46b51b74646d5bd52724cfabafdf8c2d8b25c312f724c407bcee797039f3cef" Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.511363 4946 scope.go:117] "RemoveContainer" containerID="920741b2938a779a2e6a267850ce5c3c9c8d3bf182f5b80c6b51fe1867dde70c" Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.575779 4946 scope.go:117] "RemoveContainer" containerID="c466309c8472522b4385b9b2a542c9aa161d57b77dfb8bd624ed1560295a6d7f" Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.646844 4946 scope.go:117] "RemoveContainer" containerID="2cf164bcb1ae2e32ca2e90cd989a94547ae913e877927ac9aec86b316a27a8a4" Dec 04 15:36:44 crc kubenswrapper[4946]: I1204 15:36:44.956495 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fzc8f"] Dec 04 15:36:45 crc kubenswrapper[4946]: I1204 15:36:45.356934 4946 generic.go:334] "Generic (PLEG): container finished" podID="a0482ed7-b860-4399-b274-85f665a03afa" containerID="e07bf34bda346ad9bedc8323fd77e179ac6f4cb98000b941353ef43fbef7fa3f" exitCode=0 Dec 04 15:36:45 crc kubenswrapper[4946]: I1204 15:36:45.357009 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fzc8f" event={"ID":"a0482ed7-b860-4399-b274-85f665a03afa","Type":"ContainerDied","Data":"e07bf34bda346ad9bedc8323fd77e179ac6f4cb98000b941353ef43fbef7fa3f"} Dec 04 15:36:45 crc kubenswrapper[4946]: I1204 15:36:45.357356 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fzc8f" event={"ID":"a0482ed7-b860-4399-b274-85f665a03afa","Type":"ContainerStarted","Data":"dbde4c9535cfcf7da5e64d5a1b4f02e87d1de8c4d6672f45eda4b59356874208"} Dec 04 15:36:46 crc kubenswrapper[4946]: I1204 15:36:46.371072 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fzc8f" event={"ID":"a0482ed7-b860-4399-b274-85f665a03afa","Type":"ContainerStarted","Data":"77fd94c12d88b60689da4aa45cfcc2df11dc634c9c98954e32f7301eb0189b4c"} Dec 04 15:36:50 crc kubenswrapper[4946]: I1204 15:36:50.425962 4946 generic.go:334] "Generic (PLEG): container finished" podID="a0482ed7-b860-4399-b274-85f665a03afa" containerID="77fd94c12d88b60689da4aa45cfcc2df11dc634c9c98954e32f7301eb0189b4c" exitCode=0 Dec 04 15:36:50 crc kubenswrapper[4946]: I1204 15:36:50.426075 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fzc8f" event={"ID":"a0482ed7-b860-4399-b274-85f665a03afa","Type":"ContainerDied","Data":"77fd94c12d88b60689da4aa45cfcc2df11dc634c9c98954e32f7301eb0189b4c"} Dec 04 15:36:51 crc kubenswrapper[4946]: I1204 15:36:51.444708 4946 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fzc8f" event={"ID":"a0482ed7-b860-4399-b274-85f665a03afa","Type":"ContainerStarted","Data":"f9d4c7afe7a90a47bdbbf0ad295c6a53b1cbffae142f6d5034e4e12796defe8c"} Dec 04 15:36:51 crc kubenswrapper[4946]: I1204 15:36:51.506560 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fzc8f" podStartSLOduration=3.028198623 podStartE2EDuration="8.506524044s" podCreationTimestamp="2025-12-04 15:36:43 +0000 UTC" firstStartedPulling="2025-12-04 15:36:45.359343545 +0000 UTC m=+2056.245387186" lastFinishedPulling="2025-12-04 15:36:50.837668956 +0000 UTC m=+2061.723712607" observedRunningTime="2025-12-04 15:36:51.481368312 +0000 UTC m=+2062.367411993" watchObservedRunningTime="2025-12-04 15:36:51.506524044 +0000 UTC m=+2062.392567725" Dec 04 15:36:54 crc kubenswrapper[4946]: I1204 15:36:54.360107 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fzc8f" Dec 04 15:36:54 crc kubenswrapper[4946]: I1204 15:36:54.360949 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fzc8f" Dec 04 15:36:55 crc kubenswrapper[4946]: I1204 15:36:55.434726 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fzc8f" podUID="a0482ed7-b860-4399-b274-85f665a03afa" containerName="registry-server" probeResult="failure" output=< Dec 04 15:36:55 crc kubenswrapper[4946]: timeout: failed to connect service ":50051" within 1s Dec 04 15:36:55 crc kubenswrapper[4946]: > Dec 04 15:37:00 crc kubenswrapper[4946]: I1204 15:37:00.056802 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-tmjn4"] Dec 04 15:37:00 crc kubenswrapper[4946]: I1204 15:37:00.068415 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-tmjn4"] Dec 04 15:37:01 crc kubenswrapper[4946]: I1204 15:37:01.477681 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b221907-3033-4f08-b4b4-78fca89f7876" path="/var/lib/kubelet/pods/5b221907-3033-4f08-b4b4-78fca89f7876/volumes" Dec 04 15:37:05 crc kubenswrapper[4946]: I1204 15:37:05.432031 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fzc8f" podUID="a0482ed7-b860-4399-b274-85f665a03afa" containerName="registry-server" probeResult="failure" output=< Dec 04 15:37:05 crc kubenswrapper[4946]: timeout: failed to connect service ":50051" within 1s Dec 04 15:37:05 crc kubenswrapper[4946]: > Dec 04 15:37:14 crc kubenswrapper[4946]: I1204 15:37:14.431177 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fzc8f" Dec 04 15:37:14 crc kubenswrapper[4946]: I1204 15:37:14.491828 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fzc8f" Dec 04 15:37:15 crc kubenswrapper[4946]: I1204 15:37:15.213288 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fzc8f"] Dec 04 15:37:15 crc kubenswrapper[4946]: I1204 15:37:15.806284 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fzc8f" podUID="a0482ed7-b860-4399-b274-85f665a03afa" containerName="registry-server" 
containerID="cri-o://f9d4c7afe7a90a47bdbbf0ad295c6a53b1cbffae142f6d5034e4e12796defe8c" gracePeriod=2 Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.451870 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fzc8f" Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.623201 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0482ed7-b860-4399-b274-85f665a03afa-utilities\") pod \"a0482ed7-b860-4399-b274-85f665a03afa\" (UID: \"a0482ed7-b860-4399-b274-85f665a03afa\") " Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.623389 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0482ed7-b860-4399-b274-85f665a03afa-catalog-content\") pod \"a0482ed7-b860-4399-b274-85f665a03afa\" (UID: \"a0482ed7-b860-4399-b274-85f665a03afa\") " Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.623447 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vs7l5\" (UniqueName: \"kubernetes.io/projected/a0482ed7-b860-4399-b274-85f665a03afa-kube-api-access-vs7l5\") pod \"a0482ed7-b860-4399-b274-85f665a03afa\" (UID: \"a0482ed7-b860-4399-b274-85f665a03afa\") " Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.624294 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0482ed7-b860-4399-b274-85f665a03afa-utilities" (OuterVolumeSpecName: "utilities") pod "a0482ed7-b860-4399-b274-85f665a03afa" (UID: "a0482ed7-b860-4399-b274-85f665a03afa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.626210 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0482ed7-b860-4399-b274-85f665a03afa-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.632329 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0482ed7-b860-4399-b274-85f665a03afa-kube-api-access-vs7l5" (OuterVolumeSpecName: "kube-api-access-vs7l5") pod "a0482ed7-b860-4399-b274-85f665a03afa" (UID: "a0482ed7-b860-4399-b274-85f665a03afa"). InnerVolumeSpecName "kube-api-access-vs7l5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.737905 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vs7l5\" (UniqueName: \"kubernetes.io/projected/a0482ed7-b860-4399-b274-85f665a03afa-kube-api-access-vs7l5\") on node \"crc\" DevicePath \"\"" Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.776617 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0482ed7-b860-4399-b274-85f665a03afa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a0482ed7-b860-4399-b274-85f665a03afa" (UID: "a0482ed7-b860-4399-b274-85f665a03afa"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.827463 4946 generic.go:334] "Generic (PLEG): container finished" podID="a0482ed7-b860-4399-b274-85f665a03afa" containerID="f9d4c7afe7a90a47bdbbf0ad295c6a53b1cbffae142f6d5034e4e12796defe8c" exitCode=0 Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.827582 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fzc8f" Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.827617 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fzc8f" event={"ID":"a0482ed7-b860-4399-b274-85f665a03afa","Type":"ContainerDied","Data":"f9d4c7afe7a90a47bdbbf0ad295c6a53b1cbffae142f6d5034e4e12796defe8c"} Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.829463 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fzc8f" event={"ID":"a0482ed7-b860-4399-b274-85f665a03afa","Type":"ContainerDied","Data":"dbde4c9535cfcf7da5e64d5a1b4f02e87d1de8c4d6672f45eda4b59356874208"} Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.829517 4946 scope.go:117] "RemoveContainer" containerID="f9d4c7afe7a90a47bdbbf0ad295c6a53b1cbffae142f6d5034e4e12796defe8c" Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.840857 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0482ed7-b860-4399-b274-85f665a03afa-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.856561 4946 scope.go:117] "RemoveContainer" containerID="77fd94c12d88b60689da4aa45cfcc2df11dc634c9c98954e32f7301eb0189b4c" Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.877222 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fzc8f"] Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.888756 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fzc8f"] Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.902379 4946 scope.go:117] "RemoveContainer" containerID="e07bf34bda346ad9bedc8323fd77e179ac6f4cb98000b941353ef43fbef7fa3f" Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.942937 4946 scope.go:117] "RemoveContainer" containerID="f9d4c7afe7a90a47bdbbf0ad295c6a53b1cbffae142f6d5034e4e12796defe8c" Dec 04 15:37:16 crc kubenswrapper[4946]: E1204 15:37:16.943487 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9d4c7afe7a90a47bdbbf0ad295c6a53b1cbffae142f6d5034e4e12796defe8c\": container with ID starting with f9d4c7afe7a90a47bdbbf0ad295c6a53b1cbffae142f6d5034e4e12796defe8c not found: ID does not exist" containerID="f9d4c7afe7a90a47bdbbf0ad295c6a53b1cbffae142f6d5034e4e12796defe8c" Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.943641 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9d4c7afe7a90a47bdbbf0ad295c6a53b1cbffae142f6d5034e4e12796defe8c"} err="failed to get container status \"f9d4c7afe7a90a47bdbbf0ad295c6a53b1cbffae142f6d5034e4e12796defe8c\": rpc error: code = NotFound desc = could not find container \"f9d4c7afe7a90a47bdbbf0ad295c6a53b1cbffae142f6d5034e4e12796defe8c\": container with ID starting with f9d4c7afe7a90a47bdbbf0ad295c6a53b1cbffae142f6d5034e4e12796defe8c not found: ID does not exist" Dec 04 15:37:16 crc 
kubenswrapper[4946]: I1204 15:37:16.943743 4946 scope.go:117] "RemoveContainer" containerID="77fd94c12d88b60689da4aa45cfcc2df11dc634c9c98954e32f7301eb0189b4c" Dec 04 15:37:16 crc kubenswrapper[4946]: E1204 15:37:16.944361 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77fd94c12d88b60689da4aa45cfcc2df11dc634c9c98954e32f7301eb0189b4c\": container with ID starting with 77fd94c12d88b60689da4aa45cfcc2df11dc634c9c98954e32f7301eb0189b4c not found: ID does not exist" containerID="77fd94c12d88b60689da4aa45cfcc2df11dc634c9c98954e32f7301eb0189b4c" Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.944405 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77fd94c12d88b60689da4aa45cfcc2df11dc634c9c98954e32f7301eb0189b4c"} err="failed to get container status \"77fd94c12d88b60689da4aa45cfcc2df11dc634c9c98954e32f7301eb0189b4c\": rpc error: code = NotFound desc = could not find container \"77fd94c12d88b60689da4aa45cfcc2df11dc634c9c98954e32f7301eb0189b4c\": container with ID starting with 77fd94c12d88b60689da4aa45cfcc2df11dc634c9c98954e32f7301eb0189b4c not found: ID does not exist" Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.944443 4946 scope.go:117] "RemoveContainer" containerID="e07bf34bda346ad9bedc8323fd77e179ac6f4cb98000b941353ef43fbef7fa3f" Dec 04 15:37:16 crc kubenswrapper[4946]: E1204 15:37:16.944846 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e07bf34bda346ad9bedc8323fd77e179ac6f4cb98000b941353ef43fbef7fa3f\": container with ID starting with e07bf34bda346ad9bedc8323fd77e179ac6f4cb98000b941353ef43fbef7fa3f not found: ID does not exist" containerID="e07bf34bda346ad9bedc8323fd77e179ac6f4cb98000b941353ef43fbef7fa3f" Dec 04 15:37:16 crc kubenswrapper[4946]: I1204 15:37:16.944926 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e07bf34bda346ad9bedc8323fd77e179ac6f4cb98000b941353ef43fbef7fa3f"} err="failed to get container status \"e07bf34bda346ad9bedc8323fd77e179ac6f4cb98000b941353ef43fbef7fa3f\": rpc error: code = NotFound desc = could not find container \"e07bf34bda346ad9bedc8323fd77e179ac6f4cb98000b941353ef43fbef7fa3f\": container with ID starting with e07bf34bda346ad9bedc8323fd77e179ac6f4cb98000b941353ef43fbef7fa3f not found: ID does not exist" Dec 04 15:37:17 crc kubenswrapper[4946]: I1204 15:37:17.476586 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0482ed7-b860-4399-b274-85f665a03afa" path="/var/lib/kubelet/pods/a0482ed7-b860-4399-b274-85f665a03afa/volumes" Dec 04 15:37:22 crc kubenswrapper[4946]: I1204 15:37:22.478890 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:37:22 crc kubenswrapper[4946]: I1204 15:37:22.479756 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:37:31 crc kubenswrapper[4946]: I1204 15:37:31.065145 4946 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/nova-cell0-cell-mapping-lzwkv"] Dec 04 15:37:31 crc kubenswrapper[4946]: I1204 15:37:31.074073 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-lzwkv"] Dec 04 15:37:31 crc kubenswrapper[4946]: I1204 15:37:31.469409 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e47ae595-d6e7-4eec-828f-98755b3d08b5" path="/var/lib/kubelet/pods/e47ae595-d6e7-4eec-828f-98755b3d08b5/volumes" Dec 04 15:37:33 crc kubenswrapper[4946]: I1204 15:37:33.054872 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hnrx8"] Dec 04 15:37:33 crc kubenswrapper[4946]: I1204 15:37:33.069023 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hnrx8"] Dec 04 15:37:33 crc kubenswrapper[4946]: I1204 15:37:33.476976 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="baed508c-c3b8-40d5-a421-5121f9e3f8f5" path="/var/lib/kubelet/pods/baed508c-c3b8-40d5-a421-5121f9e3f8f5/volumes" Dec 04 15:37:40 crc kubenswrapper[4946]: I1204 15:37:40.144234 4946 generic.go:334] "Generic (PLEG): container finished" podID="d2eb924b-02a3-41e8-b820-0a89c1420ebc" containerID="3d5561d10a173c087439a8227f381a5ef89f76c6630f78e007be4a4b92f03fed" exitCode=0 Dec 04 15:37:40 crc kubenswrapper[4946]: I1204 15:37:40.144327 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm" event={"ID":"d2eb924b-02a3-41e8-b820-0a89c1420ebc","Type":"ContainerDied","Data":"3d5561d10a173c087439a8227f381a5ef89f76c6630f78e007be4a4b92f03fed"} Dec 04 15:37:41 crc kubenswrapper[4946]: I1204 15:37:41.862675 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm" Dec 04 15:37:41 crc kubenswrapper[4946]: I1204 15:37:41.926922 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2eb924b-02a3-41e8-b820-0a89c1420ebc-ssh-key\") pod \"d2eb924b-02a3-41e8-b820-0a89c1420ebc\" (UID: \"d2eb924b-02a3-41e8-b820-0a89c1420ebc\") " Dec 04 15:37:41 crc kubenswrapper[4946]: I1204 15:37:41.927034 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2eb924b-02a3-41e8-b820-0a89c1420ebc-inventory\") pod \"d2eb924b-02a3-41e8-b820-0a89c1420ebc\" (UID: \"d2eb924b-02a3-41e8-b820-0a89c1420ebc\") " Dec 04 15:37:41 crc kubenswrapper[4946]: I1204 15:37:41.927277 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqpkm\" (UniqueName: \"kubernetes.io/projected/d2eb924b-02a3-41e8-b820-0a89c1420ebc-kube-api-access-gqpkm\") pod \"d2eb924b-02a3-41e8-b820-0a89c1420ebc\" (UID: \"d2eb924b-02a3-41e8-b820-0a89c1420ebc\") " Dec 04 15:37:41 crc kubenswrapper[4946]: I1204 15:37:41.940306 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2eb924b-02a3-41e8-b820-0a89c1420ebc-kube-api-access-gqpkm" (OuterVolumeSpecName: "kube-api-access-gqpkm") pod "d2eb924b-02a3-41e8-b820-0a89c1420ebc" (UID: "d2eb924b-02a3-41e8-b820-0a89c1420ebc"). InnerVolumeSpecName "kube-api-access-gqpkm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:37:41 crc kubenswrapper[4946]: I1204 15:37:41.969067 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2eb924b-02a3-41e8-b820-0a89c1420ebc-inventory" (OuterVolumeSpecName: "inventory") pod "d2eb924b-02a3-41e8-b820-0a89c1420ebc" (UID: "d2eb924b-02a3-41e8-b820-0a89c1420ebc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:37:41 crc kubenswrapper[4946]: I1204 15:37:41.981110 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2eb924b-02a3-41e8-b820-0a89c1420ebc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d2eb924b-02a3-41e8-b820-0a89c1420ebc" (UID: "d2eb924b-02a3-41e8-b820-0a89c1420ebc"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.032509 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqpkm\" (UniqueName: \"kubernetes.io/projected/d2eb924b-02a3-41e8-b820-0a89c1420ebc-kube-api-access-gqpkm\") on node \"crc\" DevicePath \"\"" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.032562 4946 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2eb924b-02a3-41e8-b820-0a89c1420ebc-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.032575 4946 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2eb924b-02a3-41e8-b820-0a89c1420ebc-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.171216 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm" event={"ID":"d2eb924b-02a3-41e8-b820-0a89c1420ebc","Type":"ContainerDied","Data":"c79e14608ca398a3d025df43a6485d43544e785a3a95fac621c7006ea1125243"} Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.171263 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c79e14608ca398a3d025df43a6485d43544e785a3a95fac621c7006ea1125243" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.171324 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.365632 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555"] Dec 04 15:37:42 crc kubenswrapper[4946]: E1204 15:37:42.368058 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0482ed7-b860-4399-b274-85f665a03afa" containerName="extract-content" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.368324 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0482ed7-b860-4399-b274-85f665a03afa" containerName="extract-content" Dec 04 15:37:42 crc kubenswrapper[4946]: E1204 15:37:42.368586 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0482ed7-b860-4399-b274-85f665a03afa" containerName="registry-server" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.368782 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0482ed7-b860-4399-b274-85f665a03afa" containerName="registry-server" Dec 04 15:37:42 crc kubenswrapper[4946]: E1204 15:37:42.374361 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0482ed7-b860-4399-b274-85f665a03afa" containerName="extract-utilities" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.374636 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0482ed7-b860-4399-b274-85f665a03afa" containerName="extract-utilities" Dec 04 15:37:42 crc kubenswrapper[4946]: E1204 15:37:42.375050 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2eb924b-02a3-41e8-b820-0a89c1420ebc" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.375345 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2eb924b-02a3-41e8-b820-0a89c1420ebc" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.376578 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0482ed7-b860-4399-b274-85f665a03afa" containerName="registry-server" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.376704 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2eb924b-02a3-41e8-b820-0a89c1420ebc" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.378343 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.380942 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555"] Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.383540 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.383723 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bhtcv" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.383787 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.383953 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.442374 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a3311d26-79ab-4472-944b-4d6ac8847a76-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6l555\" (UID: \"a3311d26-79ab-4472-944b-4d6ac8847a76\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.442443 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5szq\" (UniqueName: \"kubernetes.io/projected/a3311d26-79ab-4472-944b-4d6ac8847a76-kube-api-access-r5szq\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6l555\" (UID: \"a3311d26-79ab-4472-944b-4d6ac8847a76\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.442471 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a3311d26-79ab-4472-944b-4d6ac8847a76-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6l555\" (UID: \"a3311d26-79ab-4472-944b-4d6ac8847a76\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.547093 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a3311d26-79ab-4472-944b-4d6ac8847a76-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6l555\" (UID: \"a3311d26-79ab-4472-944b-4d6ac8847a76\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.547210 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5szq\" (UniqueName: \"kubernetes.io/projected/a3311d26-79ab-4472-944b-4d6ac8847a76-kube-api-access-r5szq\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6l555\" (UID: \"a3311d26-79ab-4472-944b-4d6ac8847a76\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.547246 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a3311d26-79ab-4472-944b-4d6ac8847a76-inventory\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-6l555\" (UID: \"a3311d26-79ab-4472-944b-4d6ac8847a76\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.553233 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a3311d26-79ab-4472-944b-4d6ac8847a76-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6l555\" (UID: \"a3311d26-79ab-4472-944b-4d6ac8847a76\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.553523 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a3311d26-79ab-4472-944b-4d6ac8847a76-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6l555\" (UID: \"a3311d26-79ab-4472-944b-4d6ac8847a76\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.569072 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5szq\" (UniqueName: \"kubernetes.io/projected/a3311d26-79ab-4472-944b-4d6ac8847a76-kube-api-access-r5szq\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6l555\" (UID: \"a3311d26-79ab-4472-944b-4d6ac8847a76\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555" Dec 04 15:37:42 crc kubenswrapper[4946]: I1204 15:37:42.703753 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555" Dec 04 15:37:43 crc kubenswrapper[4946]: I1204 15:37:43.311744 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555"] Dec 04 15:37:44 crc kubenswrapper[4946]: I1204 15:37:44.198183 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555" event={"ID":"a3311d26-79ab-4472-944b-4d6ac8847a76","Type":"ContainerStarted","Data":"fdd81412f63ac3e690510e264030dc552aa0b89441cfdc0f5a5afcf56702f2d8"} Dec 04 15:37:44 crc kubenswrapper[4946]: I1204 15:37:44.198758 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555" event={"ID":"a3311d26-79ab-4472-944b-4d6ac8847a76","Type":"ContainerStarted","Data":"cc353f8b7c6bd4089eda27d93615b56cecf620494865670e53facd673a551e31"} Dec 04 15:37:44 crc kubenswrapper[4946]: I1204 15:37:44.226944 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555" podStartSLOduration=1.805283122 podStartE2EDuration="2.226919695s" podCreationTimestamp="2025-12-04 15:37:42 +0000 UTC" firstStartedPulling="2025-12-04 15:37:43.325742582 +0000 UTC m=+2114.211786233" lastFinishedPulling="2025-12-04 15:37:43.747379165 +0000 UTC m=+2114.633422806" observedRunningTime="2025-12-04 15:37:44.217673168 +0000 UTC m=+2115.103716809" watchObservedRunningTime="2025-12-04 15:37:44.226919695 +0000 UTC m=+2115.112963336" Dec 04 15:37:44 crc kubenswrapper[4946]: I1204 15:37:44.854673 4946 scope.go:117] "RemoveContainer" containerID="cdc3517ae7b0fcce6b3536308c82edfd1c7053cd4782150c9009eb8cfe3a468e" Dec 04 15:37:44 crc kubenswrapper[4946]: I1204 15:37:44.906482 4946 scope.go:117] "RemoveContainer" 
containerID="bfcabc4a12f7727b6260f34c03e58fa3866948951b8ae0ab6045d59e5bd0c782" Dec 04 15:37:44 crc kubenswrapper[4946]: I1204 15:37:44.989072 4946 scope.go:117] "RemoveContainer" containerID="0dceceb24243ed91dc3044c19eacddb35e8980a5052656405a7ca76c46158eda" Dec 04 15:37:50 crc kubenswrapper[4946]: I1204 15:37:50.269546 4946 generic.go:334] "Generic (PLEG): container finished" podID="a3311d26-79ab-4472-944b-4d6ac8847a76" containerID="fdd81412f63ac3e690510e264030dc552aa0b89441cfdc0f5a5afcf56702f2d8" exitCode=0 Dec 04 15:37:50 crc kubenswrapper[4946]: I1204 15:37:50.269622 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555" event={"ID":"a3311d26-79ab-4472-944b-4d6ac8847a76","Type":"ContainerDied","Data":"fdd81412f63ac3e690510e264030dc552aa0b89441cfdc0f5a5afcf56702f2d8"} Dec 04 15:37:51 crc kubenswrapper[4946]: I1204 15:37:51.856223 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555" Dec 04 15:37:51 crc kubenswrapper[4946]: I1204 15:37:51.937151 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a3311d26-79ab-4472-944b-4d6ac8847a76-inventory\") pod \"a3311d26-79ab-4472-944b-4d6ac8847a76\" (UID: \"a3311d26-79ab-4472-944b-4d6ac8847a76\") " Dec 04 15:37:51 crc kubenswrapper[4946]: I1204 15:37:51.937482 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a3311d26-79ab-4472-944b-4d6ac8847a76-ssh-key\") pod \"a3311d26-79ab-4472-944b-4d6ac8847a76\" (UID: \"a3311d26-79ab-4472-944b-4d6ac8847a76\") " Dec 04 15:37:51 crc kubenswrapper[4946]: I1204 15:37:51.937512 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5szq\" (UniqueName: \"kubernetes.io/projected/a3311d26-79ab-4472-944b-4d6ac8847a76-kube-api-access-r5szq\") pod \"a3311d26-79ab-4472-944b-4d6ac8847a76\" (UID: \"a3311d26-79ab-4472-944b-4d6ac8847a76\") " Dec 04 15:37:51 crc kubenswrapper[4946]: I1204 15:37:51.947876 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3311d26-79ab-4472-944b-4d6ac8847a76-kube-api-access-r5szq" (OuterVolumeSpecName: "kube-api-access-r5szq") pod "a3311d26-79ab-4472-944b-4d6ac8847a76" (UID: "a3311d26-79ab-4472-944b-4d6ac8847a76"). InnerVolumeSpecName "kube-api-access-r5szq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:37:51 crc kubenswrapper[4946]: I1204 15:37:51.970020 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3311d26-79ab-4472-944b-4d6ac8847a76-inventory" (OuterVolumeSpecName: "inventory") pod "a3311d26-79ab-4472-944b-4d6ac8847a76" (UID: "a3311d26-79ab-4472-944b-4d6ac8847a76"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:37:51 crc kubenswrapper[4946]: I1204 15:37:51.997319 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3311d26-79ab-4472-944b-4d6ac8847a76-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a3311d26-79ab-4472-944b-4d6ac8847a76" (UID: "a3311d26-79ab-4472-944b-4d6ac8847a76"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.043287 4946 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a3311d26-79ab-4472-944b-4d6ac8847a76-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.043329 4946 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a3311d26-79ab-4472-944b-4d6ac8847a76-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.043341 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5szq\" (UniqueName: \"kubernetes.io/projected/a3311d26-79ab-4472-944b-4d6ac8847a76-kube-api-access-r5szq\") on node \"crc\" DevicePath \"\"" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.300225 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555" event={"ID":"a3311d26-79ab-4472-944b-4d6ac8847a76","Type":"ContainerDied","Data":"cc353f8b7c6bd4089eda27d93615b56cecf620494865670e53facd673a551e31"} Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.300313 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc353f8b7c6bd4089eda27d93615b56cecf620494865670e53facd673a551e31" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.300335 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6l555" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.404968 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7"] Dec 04 15:37:52 crc kubenswrapper[4946]: E1204 15:37:52.406035 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3311d26-79ab-4472-944b-4d6ac8847a76" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.406097 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3311d26-79ab-4472-944b-4d6ac8847a76" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.406752 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3311d26-79ab-4472-944b-4d6ac8847a76" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.408753 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.412134 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.412322 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.412457 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.418613 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bhtcv" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.419391 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7"] Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.458790 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfjss\" (UniqueName: \"kubernetes.io/projected/ab9c79b0-c651-4fdb-aa44-76b66239ef80-kube-api-access-zfjss\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-hw8x7\" (UID: \"ab9c79b0-c651-4fdb-aa44-76b66239ef80\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.458881 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab9c79b0-c651-4fdb-aa44-76b66239ef80-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-hw8x7\" (UID: \"ab9c79b0-c651-4fdb-aa44-76b66239ef80\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.459157 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab9c79b0-c651-4fdb-aa44-76b66239ef80-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-hw8x7\" (UID: \"ab9c79b0-c651-4fdb-aa44-76b66239ef80\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.486769 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.486846 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.562733 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab9c79b0-c651-4fdb-aa44-76b66239ef80-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-hw8x7\" (UID: \"ab9c79b0-c651-4fdb-aa44-76b66239ef80\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7" Dec 04 15:37:52 crc 
kubenswrapper[4946]: I1204 15:37:52.563028 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfjss\" (UniqueName: \"kubernetes.io/projected/ab9c79b0-c651-4fdb-aa44-76b66239ef80-kube-api-access-zfjss\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-hw8x7\" (UID: \"ab9c79b0-c651-4fdb-aa44-76b66239ef80\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.563075 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab9c79b0-c651-4fdb-aa44-76b66239ef80-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-hw8x7\" (UID: \"ab9c79b0-c651-4fdb-aa44-76b66239ef80\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.569397 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab9c79b0-c651-4fdb-aa44-76b66239ef80-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-hw8x7\" (UID: \"ab9c79b0-c651-4fdb-aa44-76b66239ef80\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.569552 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab9c79b0-c651-4fdb-aa44-76b66239ef80-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-hw8x7\" (UID: \"ab9c79b0-c651-4fdb-aa44-76b66239ef80\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.583050 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfjss\" (UniqueName: \"kubernetes.io/projected/ab9c79b0-c651-4fdb-aa44-76b66239ef80-kube-api-access-zfjss\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-hw8x7\" (UID: \"ab9c79b0-c651-4fdb-aa44-76b66239ef80\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7" Dec 04 15:37:52 crc kubenswrapper[4946]: I1204 15:37:52.736363 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7" Dec 04 15:37:53 crc kubenswrapper[4946]: I1204 15:37:53.512455 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7"] Dec 04 15:37:54 crc kubenswrapper[4946]: I1204 15:37:54.324511 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7" event={"ID":"ab9c79b0-c651-4fdb-aa44-76b66239ef80","Type":"ContainerStarted","Data":"44ef0764295ef52fd42a7f552c7674d52f909ee8f40db0537504b712ad5eca9f"} Dec 04 15:37:55 crc kubenswrapper[4946]: I1204 15:37:55.338167 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7" event={"ID":"ab9c79b0-c651-4fdb-aa44-76b66239ef80","Type":"ContainerStarted","Data":"0b296467676008fa5475c9beec58f0e52219f503523dbcb40190b93a22156875"} Dec 04 15:37:55 crc kubenswrapper[4946]: I1204 15:37:55.362025 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7" podStartSLOduration=2.826321724 podStartE2EDuration="3.362003883s" podCreationTimestamp="2025-12-04 15:37:52 +0000 UTC" firstStartedPulling="2025-12-04 15:37:53.488365793 +0000 UTC m=+2124.374409434" lastFinishedPulling="2025-12-04 15:37:54.024047912 +0000 UTC m=+2124.910091593" observedRunningTime="2025-12-04 15:37:55.361983792 +0000 UTC m=+2126.248027433" watchObservedRunningTime="2025-12-04 15:37:55.362003883 +0000 UTC m=+2126.248047524" Dec 04 15:38:17 crc kubenswrapper[4946]: I1204 15:38:17.079183 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-6fxz5"] Dec 04 15:38:17 crc kubenswrapper[4946]: I1204 15:38:17.095674 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-6fxz5"] Dec 04 15:38:17 crc kubenswrapper[4946]: I1204 15:38:17.468007 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="680312de-5b5b-4622-8bf8-c987b2f87a05" path="/var/lib/kubelet/pods/680312de-5b5b-4622-8bf8-c987b2f87a05/volumes" Dec 04 15:38:22 crc kubenswrapper[4946]: I1204 15:38:22.478787 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:38:22 crc kubenswrapper[4946]: I1204 15:38:22.479476 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:38:22 crc kubenswrapper[4946]: I1204 15:38:22.479533 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:38:22 crc kubenswrapper[4946]: I1204 15:38:22.480439 4946 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f779b15ef6675c268d0553f67a4341f7aaa97f77eb86eee0a5fcf482005b8efc"} pod="openshift-machine-config-operator/machine-config-daemon-qhv79" containerMessage="Container machine-config-daemon failed liveness 
probe, will be restarted" Dec 04 15:38:22 crc kubenswrapper[4946]: I1204 15:38:22.480493 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" containerID="cri-o://f779b15ef6675c268d0553f67a4341f7aaa97f77eb86eee0a5fcf482005b8efc" gracePeriod=600 Dec 04 15:38:22 crc kubenswrapper[4946]: I1204 15:38:22.696420 4946 generic.go:334] "Generic (PLEG): container finished" podID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerID="f779b15ef6675c268d0553f67a4341f7aaa97f77eb86eee0a5fcf482005b8efc" exitCode=0 Dec 04 15:38:22 crc kubenswrapper[4946]: I1204 15:38:22.696505 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerDied","Data":"f779b15ef6675c268d0553f67a4341f7aaa97f77eb86eee0a5fcf482005b8efc"} Dec 04 15:38:22 crc kubenswrapper[4946]: I1204 15:38:22.696859 4946 scope.go:117] "RemoveContainer" containerID="7c06216442fd6374cd7f5de24d3ead869216c3781cceb10fdd7e497255cc6a9f" Dec 04 15:38:23 crc kubenswrapper[4946]: I1204 15:38:23.731326 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42"} Dec 04 15:38:34 crc kubenswrapper[4946]: I1204 15:38:34.873730 4946 generic.go:334] "Generic (PLEG): container finished" podID="ab9c79b0-c651-4fdb-aa44-76b66239ef80" containerID="0b296467676008fa5475c9beec58f0e52219f503523dbcb40190b93a22156875" exitCode=0 Dec 04 15:38:34 crc kubenswrapper[4946]: I1204 15:38:34.873803 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7" event={"ID":"ab9c79b0-c651-4fdb-aa44-76b66239ef80","Type":"ContainerDied","Data":"0b296467676008fa5475c9beec58f0e52219f503523dbcb40190b93a22156875"} Dec 04 15:38:36 crc kubenswrapper[4946]: I1204 15:38:36.536451 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7" Dec 04 15:38:36 crc kubenswrapper[4946]: I1204 15:38:36.670929 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab9c79b0-c651-4fdb-aa44-76b66239ef80-inventory\") pod \"ab9c79b0-c651-4fdb-aa44-76b66239ef80\" (UID: \"ab9c79b0-c651-4fdb-aa44-76b66239ef80\") " Dec 04 15:38:36 crc kubenswrapper[4946]: I1204 15:38:36.671594 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab9c79b0-c651-4fdb-aa44-76b66239ef80-ssh-key\") pod \"ab9c79b0-c651-4fdb-aa44-76b66239ef80\" (UID: \"ab9c79b0-c651-4fdb-aa44-76b66239ef80\") " Dec 04 15:38:36 crc kubenswrapper[4946]: I1204 15:38:36.671883 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfjss\" (UniqueName: \"kubernetes.io/projected/ab9c79b0-c651-4fdb-aa44-76b66239ef80-kube-api-access-zfjss\") pod \"ab9c79b0-c651-4fdb-aa44-76b66239ef80\" (UID: \"ab9c79b0-c651-4fdb-aa44-76b66239ef80\") " Dec 04 15:38:36 crc kubenswrapper[4946]: I1204 15:38:36.683416 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab9c79b0-c651-4fdb-aa44-76b66239ef80-kube-api-access-zfjss" (OuterVolumeSpecName: "kube-api-access-zfjss") pod "ab9c79b0-c651-4fdb-aa44-76b66239ef80" (UID: "ab9c79b0-c651-4fdb-aa44-76b66239ef80"). InnerVolumeSpecName "kube-api-access-zfjss". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:38:36 crc kubenswrapper[4946]: I1204 15:38:36.715137 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab9c79b0-c651-4fdb-aa44-76b66239ef80-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ab9c79b0-c651-4fdb-aa44-76b66239ef80" (UID: "ab9c79b0-c651-4fdb-aa44-76b66239ef80"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:38:36 crc kubenswrapper[4946]: I1204 15:38:36.716581 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab9c79b0-c651-4fdb-aa44-76b66239ef80-inventory" (OuterVolumeSpecName: "inventory") pod "ab9c79b0-c651-4fdb-aa44-76b66239ef80" (UID: "ab9c79b0-c651-4fdb-aa44-76b66239ef80"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:38:36 crc kubenswrapper[4946]: I1204 15:38:36.778762 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfjss\" (UniqueName: \"kubernetes.io/projected/ab9c79b0-c651-4fdb-aa44-76b66239ef80-kube-api-access-zfjss\") on node \"crc\" DevicePath \"\"" Dec 04 15:38:36 crc kubenswrapper[4946]: I1204 15:38:36.778821 4946 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab9c79b0-c651-4fdb-aa44-76b66239ef80-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 15:38:36 crc kubenswrapper[4946]: I1204 15:38:36.778836 4946 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab9c79b0-c651-4fdb-aa44-76b66239ef80-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 15:38:36 crc kubenswrapper[4946]: I1204 15:38:36.922145 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7" event={"ID":"ab9c79b0-c651-4fdb-aa44-76b66239ef80","Type":"ContainerDied","Data":"44ef0764295ef52fd42a7f552c7674d52f909ee8f40db0537504b712ad5eca9f"} Dec 04 15:38:36 crc kubenswrapper[4946]: I1204 15:38:36.922198 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="44ef0764295ef52fd42a7f552c7674d52f909ee8f40db0537504b712ad5eca9f" Dec 04 15:38:36 crc kubenswrapper[4946]: I1204 15:38:36.922304 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-hw8x7" Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.011726 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb"] Dec 04 15:38:37 crc kubenswrapper[4946]: E1204 15:38:37.012351 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab9c79b0-c651-4fdb-aa44-76b66239ef80" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.012371 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab9c79b0-c651-4fdb-aa44-76b66239ef80" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.012647 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab9c79b0-c651-4fdb-aa44-76b66239ef80" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.013536 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb" Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.016263 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.016436 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.017706 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bhtcv" Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.017882 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.055632 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb"] Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.189734 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c768922-7c81-4021-ab76-fd151946e8fa-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb\" (UID: \"3c768922-7c81-4021-ab76-fd151946e8fa\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb" Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.189982 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbdvz\" (UniqueName: \"kubernetes.io/projected/3c768922-7c81-4021-ab76-fd151946e8fa-kube-api-access-zbdvz\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb\" (UID: \"3c768922-7c81-4021-ab76-fd151946e8fa\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb" Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.190049 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c768922-7c81-4021-ab76-fd151946e8fa-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb\" (UID: \"3c768922-7c81-4021-ab76-fd151946e8fa\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb" Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.293115 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c768922-7c81-4021-ab76-fd151946e8fa-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb\" (UID: \"3c768922-7c81-4021-ab76-fd151946e8fa\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb" Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.293674 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbdvz\" (UniqueName: \"kubernetes.io/projected/3c768922-7c81-4021-ab76-fd151946e8fa-kube-api-access-zbdvz\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb\" (UID: \"3c768922-7c81-4021-ab76-fd151946e8fa\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb" Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.293786 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c768922-7c81-4021-ab76-fd151946e8fa-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb\" 
(UID: \"3c768922-7c81-4021-ab76-fd151946e8fa\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb" Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.298053 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c768922-7c81-4021-ab76-fd151946e8fa-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb\" (UID: \"3c768922-7c81-4021-ab76-fd151946e8fa\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb" Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.306944 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c768922-7c81-4021-ab76-fd151946e8fa-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb\" (UID: \"3c768922-7c81-4021-ab76-fd151946e8fa\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb" Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.317973 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbdvz\" (UniqueName: \"kubernetes.io/projected/3c768922-7c81-4021-ab76-fd151946e8fa-kube-api-access-zbdvz\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb\" (UID: \"3c768922-7c81-4021-ab76-fd151946e8fa\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb" Dec 04 15:38:37 crc kubenswrapper[4946]: I1204 15:38:37.357245 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb" Dec 04 15:38:38 crc kubenswrapper[4946]: I1204 15:38:38.034784 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb"] Dec 04 15:38:38 crc kubenswrapper[4946]: I1204 15:38:38.947981 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb" event={"ID":"3c768922-7c81-4021-ab76-fd151946e8fa","Type":"ContainerStarted","Data":"10bc780ed27fb86f95e5859f1da56001ca993acd3a42af1e1fb56e527cc46c6f"} Dec 04 15:38:38 crc kubenswrapper[4946]: I1204 15:38:38.948607 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb" event={"ID":"3c768922-7c81-4021-ab76-fd151946e8fa","Type":"ContainerStarted","Data":"a9c643c13234a903e509fa8be366b471bf20ff77194275bc2036f49d71f48e66"} Dec 04 15:38:38 crc kubenswrapper[4946]: I1204 15:38:38.980519 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb" podStartSLOduration=2.549154746 podStartE2EDuration="2.980486078s" podCreationTimestamp="2025-12-04 15:38:36 +0000 UTC" firstStartedPulling="2025-12-04 15:38:38.038802523 +0000 UTC m=+2168.924846164" lastFinishedPulling="2025-12-04 15:38:38.470133855 +0000 UTC m=+2169.356177496" observedRunningTime="2025-12-04 15:38:38.973027439 +0000 UTC m=+2169.859071080" watchObservedRunningTime="2025-12-04 15:38:38.980486078 +0000 UTC m=+2169.866529739" Dec 04 15:38:45 crc kubenswrapper[4946]: I1204 15:38:45.193078 4946 scope.go:117] "RemoveContainer" containerID="bd391085bd9ec385a278d2a8cbf6c58260490215b261651618db61a148b8c2a7" Dec 04 15:38:45 crc kubenswrapper[4946]: I1204 15:38:45.621944 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mslll"] Dec 04 15:38:45 crc kubenswrapper[4946]: I1204 15:38:45.625740 4946 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mslll" Dec 04 15:38:45 crc kubenswrapper[4946]: I1204 15:38:45.638782 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mslll"] Dec 04 15:38:45 crc kubenswrapper[4946]: I1204 15:38:45.757355 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c101b73-0dac-423b-a5d8-9f41625eafdd-utilities\") pod \"redhat-marketplace-mslll\" (UID: \"1c101b73-0dac-423b-a5d8-9f41625eafdd\") " pod="openshift-marketplace/redhat-marketplace-mslll" Dec 04 15:38:45 crc kubenswrapper[4946]: I1204 15:38:45.757509 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bm7x4\" (UniqueName: \"kubernetes.io/projected/1c101b73-0dac-423b-a5d8-9f41625eafdd-kube-api-access-bm7x4\") pod \"redhat-marketplace-mslll\" (UID: \"1c101b73-0dac-423b-a5d8-9f41625eafdd\") " pod="openshift-marketplace/redhat-marketplace-mslll" Dec 04 15:38:45 crc kubenswrapper[4946]: I1204 15:38:45.757543 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c101b73-0dac-423b-a5d8-9f41625eafdd-catalog-content\") pod \"redhat-marketplace-mslll\" (UID: \"1c101b73-0dac-423b-a5d8-9f41625eafdd\") " pod="openshift-marketplace/redhat-marketplace-mslll" Dec 04 15:38:45 crc kubenswrapper[4946]: I1204 15:38:45.860316 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c101b73-0dac-423b-a5d8-9f41625eafdd-utilities\") pod \"redhat-marketplace-mslll\" (UID: \"1c101b73-0dac-423b-a5d8-9f41625eafdd\") " pod="openshift-marketplace/redhat-marketplace-mslll" Dec 04 15:38:45 crc kubenswrapper[4946]: I1204 15:38:45.860408 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bm7x4\" (UniqueName: \"kubernetes.io/projected/1c101b73-0dac-423b-a5d8-9f41625eafdd-kube-api-access-bm7x4\") pod \"redhat-marketplace-mslll\" (UID: \"1c101b73-0dac-423b-a5d8-9f41625eafdd\") " pod="openshift-marketplace/redhat-marketplace-mslll" Dec 04 15:38:45 crc kubenswrapper[4946]: I1204 15:38:45.860439 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c101b73-0dac-423b-a5d8-9f41625eafdd-catalog-content\") pod \"redhat-marketplace-mslll\" (UID: \"1c101b73-0dac-423b-a5d8-9f41625eafdd\") " pod="openshift-marketplace/redhat-marketplace-mslll" Dec 04 15:38:45 crc kubenswrapper[4946]: I1204 15:38:45.861038 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c101b73-0dac-423b-a5d8-9f41625eafdd-utilities\") pod \"redhat-marketplace-mslll\" (UID: \"1c101b73-0dac-423b-a5d8-9f41625eafdd\") " pod="openshift-marketplace/redhat-marketplace-mslll" Dec 04 15:38:45 crc kubenswrapper[4946]: I1204 15:38:45.861061 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c101b73-0dac-423b-a5d8-9f41625eafdd-catalog-content\") pod \"redhat-marketplace-mslll\" (UID: \"1c101b73-0dac-423b-a5d8-9f41625eafdd\") " pod="openshift-marketplace/redhat-marketplace-mslll" Dec 04 15:38:45 crc kubenswrapper[4946]: I1204 15:38:45.886145 4946 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bm7x4\" (UniqueName: \"kubernetes.io/projected/1c101b73-0dac-423b-a5d8-9f41625eafdd-kube-api-access-bm7x4\") pod \"redhat-marketplace-mslll\" (UID: \"1c101b73-0dac-423b-a5d8-9f41625eafdd\") " pod="openshift-marketplace/redhat-marketplace-mslll" Dec 04 15:38:45 crc kubenswrapper[4946]: I1204 15:38:45.954034 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mslll" Dec 04 15:38:46 crc kubenswrapper[4946]: I1204 15:38:46.629307 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mslll"] Dec 04 15:38:47 crc kubenswrapper[4946]: I1204 15:38:47.065335 4946 generic.go:334] "Generic (PLEG): container finished" podID="1c101b73-0dac-423b-a5d8-9f41625eafdd" containerID="ba6351fb3387f85be0cd9509840850a3ef478ab97cda09012d8ca4e9d08fb237" exitCode=0 Dec 04 15:38:47 crc kubenswrapper[4946]: I1204 15:38:47.065450 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mslll" event={"ID":"1c101b73-0dac-423b-a5d8-9f41625eafdd","Type":"ContainerDied","Data":"ba6351fb3387f85be0cd9509840850a3ef478ab97cda09012d8ca4e9d08fb237"} Dec 04 15:38:47 crc kubenswrapper[4946]: I1204 15:38:47.065776 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mslll" event={"ID":"1c101b73-0dac-423b-a5d8-9f41625eafdd","Type":"ContainerStarted","Data":"3ad9fb37862a8f2616c68baebe0fa8f9e0bc8fb95a75aca6e91dcb1603d07b1d"} Dec 04 15:38:49 crc kubenswrapper[4946]: I1204 15:38:49.093353 4946 generic.go:334] "Generic (PLEG): container finished" podID="1c101b73-0dac-423b-a5d8-9f41625eafdd" containerID="91dbbc62d46bb47db392f70d72cc37f0e315f7832a8cf4937bf32bff64da57ac" exitCode=0 Dec 04 15:38:49 crc kubenswrapper[4946]: I1204 15:38:49.093421 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mslll" event={"ID":"1c101b73-0dac-423b-a5d8-9f41625eafdd","Type":"ContainerDied","Data":"91dbbc62d46bb47db392f70d72cc37f0e315f7832a8cf4937bf32bff64da57ac"} Dec 04 15:38:50 crc kubenswrapper[4946]: I1204 15:38:50.109072 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mslll" event={"ID":"1c101b73-0dac-423b-a5d8-9f41625eafdd","Type":"ContainerStarted","Data":"d2e76dd0f71114b03e733c6b01b5eb88a4f3119cdf02f735e0610b429cf0361c"} Dec 04 15:38:50 crc kubenswrapper[4946]: I1204 15:38:50.140982 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mslll" podStartSLOduration=2.688163235 podStartE2EDuration="5.140958125s" podCreationTimestamp="2025-12-04 15:38:45 +0000 UTC" firstStartedPulling="2025-12-04 15:38:47.070271929 +0000 UTC m=+2177.956315570" lastFinishedPulling="2025-12-04 15:38:49.523066819 +0000 UTC m=+2180.409110460" observedRunningTime="2025-12-04 15:38:50.132738635 +0000 UTC m=+2181.018782276" watchObservedRunningTime="2025-12-04 15:38:50.140958125 +0000 UTC m=+2181.027001766" Dec 04 15:38:55 crc kubenswrapper[4946]: I1204 15:38:55.954813 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mslll" Dec 04 15:38:55 crc kubenswrapper[4946]: I1204 15:38:55.956031 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mslll" Dec 04 15:38:56 crc kubenswrapper[4946]: I1204 
15:38:56.030483 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mslll" Dec 04 15:38:56 crc kubenswrapper[4946]: I1204 15:38:56.257729 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mslll" Dec 04 15:38:56 crc kubenswrapper[4946]: I1204 15:38:56.326504 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mslll"] Dec 04 15:38:58 crc kubenswrapper[4946]: I1204 15:38:58.221305 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mslll" podUID="1c101b73-0dac-423b-a5d8-9f41625eafdd" containerName="registry-server" containerID="cri-o://d2e76dd0f71114b03e733c6b01b5eb88a4f3119cdf02f735e0610b429cf0361c" gracePeriod=2 Dec 04 15:38:58 crc kubenswrapper[4946]: I1204 15:38:58.944576 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mslll" Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.136648 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c101b73-0dac-423b-a5d8-9f41625eafdd-catalog-content\") pod \"1c101b73-0dac-423b-a5d8-9f41625eafdd\" (UID: \"1c101b73-0dac-423b-a5d8-9f41625eafdd\") " Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.136746 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bm7x4\" (UniqueName: \"kubernetes.io/projected/1c101b73-0dac-423b-a5d8-9f41625eafdd-kube-api-access-bm7x4\") pod \"1c101b73-0dac-423b-a5d8-9f41625eafdd\" (UID: \"1c101b73-0dac-423b-a5d8-9f41625eafdd\") " Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.136892 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c101b73-0dac-423b-a5d8-9f41625eafdd-utilities\") pod \"1c101b73-0dac-423b-a5d8-9f41625eafdd\" (UID: \"1c101b73-0dac-423b-a5d8-9f41625eafdd\") " Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.138660 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c101b73-0dac-423b-a5d8-9f41625eafdd-utilities" (OuterVolumeSpecName: "utilities") pod "1c101b73-0dac-423b-a5d8-9f41625eafdd" (UID: "1c101b73-0dac-423b-a5d8-9f41625eafdd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.151598 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c101b73-0dac-423b-a5d8-9f41625eafdd-kube-api-access-bm7x4" (OuterVolumeSpecName: "kube-api-access-bm7x4") pod "1c101b73-0dac-423b-a5d8-9f41625eafdd" (UID: "1c101b73-0dac-423b-a5d8-9f41625eafdd"). InnerVolumeSpecName "kube-api-access-bm7x4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.170205 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c101b73-0dac-423b-a5d8-9f41625eafdd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1c101b73-0dac-423b-a5d8-9f41625eafdd" (UID: "1c101b73-0dac-423b-a5d8-9f41625eafdd"). InnerVolumeSpecName "catalog-content". 
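The entries above all share one shape: a journald prefix ("Dec 04 15:38:45 crc kubenswrapper[4946]:"), a klog header (severity letter, MMDD timestamp, PID, "file.go:line]"), then a structured message with key=value pairs. A minimal Go sketch for splitting that header out when grepping this artifact offline; the regex and program are illustrative helpers, not part of the kubelet:

    package main

    import (
        "fmt"
        "regexp"
    )

    // klogLine matches entries of the shape seen throughout this log:
    //   Dec 04 15:38:58 crc kubenswrapper[4946]: I1204 15:38:58.221305 4946 kuberuntime_container.go:808] "Killing container ..."
    var klogLine = regexp.MustCompile(
        `^(\w{3} \d{2} [\d:]{8}) crc kubenswrapper\[\d+\]: ([IEW])(\d{4} [\d:.]+) (\d+) ([\w.]+:\d+)\] (.*)$`)

    func main() {
        line := `Dec 04 15:38:58 crc kubenswrapper[4946]: I1204 15:38:58.221305 4946 kuberuntime_container.go:808] "Killing container with a grace period" gracePeriod=2`
        if m := klogLine.FindStringSubmatch(line); m != nil {
            // m[2] = severity (I/E/W), m[5] = source file:line, m[6] = structured message
            fmt.Printf("severity=%s source=%s msg=%s\n", m[2], m[5], m[6])
        }
    }

The severity letter matters when reading on: several E-level lines below (log.go:32, cpu_manager.go:410) record routine cleanup rather than real failures.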
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.240157 4946 generic.go:334] "Generic (PLEG): container finished" podID="1c101b73-0dac-423b-a5d8-9f41625eafdd" containerID="d2e76dd0f71114b03e733c6b01b5eb88a4f3119cdf02f735e0610b429cf0361c" exitCode=0 Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.240224 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mslll" event={"ID":"1c101b73-0dac-423b-a5d8-9f41625eafdd","Type":"ContainerDied","Data":"d2e76dd0f71114b03e733c6b01b5eb88a4f3119cdf02f735e0610b429cf0361c"} Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.240264 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mslll" event={"ID":"1c101b73-0dac-423b-a5d8-9f41625eafdd","Type":"ContainerDied","Data":"3ad9fb37862a8f2616c68baebe0fa8f9e0bc8fb95a75aca6e91dcb1603d07b1d"} Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.240276 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mslll" Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.240286 4946 scope.go:117] "RemoveContainer" containerID="d2e76dd0f71114b03e733c6b01b5eb88a4f3119cdf02f735e0610b429cf0361c" Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.240450 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c101b73-0dac-423b-a5d8-9f41625eafdd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.241417 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bm7x4\" (UniqueName: \"kubernetes.io/projected/1c101b73-0dac-423b-a5d8-9f41625eafdd-kube-api-access-bm7x4\") on node \"crc\" DevicePath \"\"" Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.241671 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c101b73-0dac-423b-a5d8-9f41625eafdd-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.294815 4946 scope.go:117] "RemoveContainer" containerID="91dbbc62d46bb47db392f70d72cc37f0e315f7832a8cf4937bf32bff64da57ac" Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.302887 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mslll"] Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.322733 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mslll"] Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.326378 4946 scope.go:117] "RemoveContainer" containerID="ba6351fb3387f85be0cd9509840850a3ef478ab97cda09012d8ca4e9d08fb237" Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.372032 4946 scope.go:117] "RemoveContainer" containerID="d2e76dd0f71114b03e733c6b01b5eb88a4f3119cdf02f735e0610b429cf0361c" Dec 04 15:38:59 crc kubenswrapper[4946]: E1204 15:38:59.372847 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2e76dd0f71114b03e733c6b01b5eb88a4f3119cdf02f735e0610b429cf0361c\": container with ID starting with d2e76dd0f71114b03e733c6b01b5eb88a4f3119cdf02f735e0610b429cf0361c not found: ID does not exist" containerID="d2e76dd0f71114b03e733c6b01b5eb88a4f3119cdf02f735e0610b429cf0361c" Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.372935 4946 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2e76dd0f71114b03e733c6b01b5eb88a4f3119cdf02f735e0610b429cf0361c"} err="failed to get container status \"d2e76dd0f71114b03e733c6b01b5eb88a4f3119cdf02f735e0610b429cf0361c\": rpc error: code = NotFound desc = could not find container \"d2e76dd0f71114b03e733c6b01b5eb88a4f3119cdf02f735e0610b429cf0361c\": container with ID starting with d2e76dd0f71114b03e733c6b01b5eb88a4f3119cdf02f735e0610b429cf0361c not found: ID does not exist" Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.372998 4946 scope.go:117] "RemoveContainer" containerID="91dbbc62d46bb47db392f70d72cc37f0e315f7832a8cf4937bf32bff64da57ac" Dec 04 15:38:59 crc kubenswrapper[4946]: E1204 15:38:59.373878 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91dbbc62d46bb47db392f70d72cc37f0e315f7832a8cf4937bf32bff64da57ac\": container with ID starting with 91dbbc62d46bb47db392f70d72cc37f0e315f7832a8cf4937bf32bff64da57ac not found: ID does not exist" containerID="91dbbc62d46bb47db392f70d72cc37f0e315f7832a8cf4937bf32bff64da57ac" Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.373929 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91dbbc62d46bb47db392f70d72cc37f0e315f7832a8cf4937bf32bff64da57ac"} err="failed to get container status \"91dbbc62d46bb47db392f70d72cc37f0e315f7832a8cf4937bf32bff64da57ac\": rpc error: code = NotFound desc = could not find container \"91dbbc62d46bb47db392f70d72cc37f0e315f7832a8cf4937bf32bff64da57ac\": container with ID starting with 91dbbc62d46bb47db392f70d72cc37f0e315f7832a8cf4937bf32bff64da57ac not found: ID does not exist" Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.373959 4946 scope.go:117] "RemoveContainer" containerID="ba6351fb3387f85be0cd9509840850a3ef478ab97cda09012d8ca4e9d08fb237" Dec 04 15:38:59 crc kubenswrapper[4946]: E1204 15:38:59.375557 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba6351fb3387f85be0cd9509840850a3ef478ab97cda09012d8ca4e9d08fb237\": container with ID starting with ba6351fb3387f85be0cd9509840850a3ef478ab97cda09012d8ca4e9d08fb237 not found: ID does not exist" containerID="ba6351fb3387f85be0cd9509840850a3ef478ab97cda09012d8ca4e9d08fb237" Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.375646 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba6351fb3387f85be0cd9509840850a3ef478ab97cda09012d8ca4e9d08fb237"} err="failed to get container status \"ba6351fb3387f85be0cd9509840850a3ef478ab97cda09012d8ca4e9d08fb237\": rpc error: code = NotFound desc = could not find container \"ba6351fb3387f85be0cd9509840850a3ef478ab97cda09012d8ca4e9d08fb237\": container with ID starting with ba6351fb3387f85be0cd9509840850a3ef478ab97cda09012d8ca4e9d08fb237 not found: ID does not exist" Dec 04 15:38:59 crc kubenswrapper[4946]: I1204 15:38:59.470545 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c101b73-0dac-423b-a5d8-9f41625eafdd" path="/var/lib/kubelet/pods/1c101b73-0dac-423b-a5d8-9f41625eafdd/volumes" Dec 04 15:39:35 crc kubenswrapper[4946]: I1204 15:39:35.782370 4946 generic.go:334] "Generic (PLEG): container finished" podID="3c768922-7c81-4021-ab76-fd151946e8fa" containerID="10bc780ed27fb86f95e5859f1da56001ca993acd3a42af1e1fb56e527cc46c6f" exitCode=0 Dec 04 15:39:35 crc kubenswrapper[4946]: I1204 
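The E1204 log.go:32 / pod_container_deletor.go:53 pairs above look alarming but appear to be a benign race: by the time the kubelet retries RemoveContainer, CRI-O has already deleted the container, so the status query returns rpc code = NotFound and the kubelet simply moves on. A sketch of that idempotent-delete pattern, using a local sentinel error as a stand-in for the real gRPC status:

    package main

    import (
        "errors"
        "fmt"
    )

    // errNotFound stands in for the runtime's NotFound status seen above
    // ("could not find container ...": the container is already gone).
    var errNotFound = errors.New("container not found")

    // removeContainer simulates the failing status query from the log.
    func removeContainer(id string) error {
        return fmt.Errorf("get status of %s: %w", id, errNotFound)
    }

    // cleanupContainer treats NotFound as success so retries stay idempotent,
    // which is why the kubelet logs the error above and simply moves on.
    func cleanupContainer(id string) error {
        if err := removeContainer(id); err != nil && !errors.Is(err, errNotFound) {
            return err
        }
        return nil
    }

    func main() {
        fmt.Println(cleanupContainer("d2e76dd0f711")) // <nil>
    }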
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.440086 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb"
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.624555 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zbdvz\" (UniqueName: \"kubernetes.io/projected/3c768922-7c81-4021-ab76-fd151946e8fa-kube-api-access-zbdvz\") pod \"3c768922-7c81-4021-ab76-fd151946e8fa\" (UID: \"3c768922-7c81-4021-ab76-fd151946e8fa\") "
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.625062 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c768922-7c81-4021-ab76-fd151946e8fa-ssh-key\") pod \"3c768922-7c81-4021-ab76-fd151946e8fa\" (UID: \"3c768922-7c81-4021-ab76-fd151946e8fa\") "
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.625294 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c768922-7c81-4021-ab76-fd151946e8fa-inventory\") pod \"3c768922-7c81-4021-ab76-fd151946e8fa\" (UID: \"3c768922-7c81-4021-ab76-fd151946e8fa\") "
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.640441 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c768922-7c81-4021-ab76-fd151946e8fa-kube-api-access-zbdvz" (OuterVolumeSpecName: "kube-api-access-zbdvz") pod "3c768922-7c81-4021-ab76-fd151946e8fa" (UID: "3c768922-7c81-4021-ab76-fd151946e8fa"). InnerVolumeSpecName "kube-api-access-zbdvz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.659217 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c768922-7c81-4021-ab76-fd151946e8fa-inventory" (OuterVolumeSpecName: "inventory") pod "3c768922-7c81-4021-ab76-fd151946e8fa" (UID: "3c768922-7c81-4021-ab76-fd151946e8fa"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.661453 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c768922-7c81-4021-ab76-fd151946e8fa-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3c768922-7c81-4021-ab76-fd151946e8fa" (UID: "3c768922-7c81-4021-ab76-fd151946e8fa"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.729168 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zbdvz\" (UniqueName: \"kubernetes.io/projected/3c768922-7c81-4021-ab76-fd151946e8fa-kube-api-access-zbdvz\") on node \"crc\" DevicePath \"\""
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.729200 4946 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c768922-7c81-4021-ab76-fd151946e8fa-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.729210 4946 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c768922-7c81-4021-ab76-fd151946e8fa-inventory\") on node \"crc\" DevicePath \"\""
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.812349 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb" event={"ID":"3c768922-7c81-4021-ab76-fd151946e8fa","Type":"ContainerDied","Data":"a9c643c13234a903e509fa8be366b471bf20ff77194275bc2036f49d71f48e66"}
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.812429 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a9c643c13234a903e509fa8be366b471bf20ff77194275bc2036f49d71f48e66"
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.812487 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb"
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.939542 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-vxxvc"]
Dec 04 15:39:37 crc kubenswrapper[4946]: E1204 15:39:37.940142 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c768922-7c81-4021-ab76-fd151946e8fa" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.940167 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c768922-7c81-4021-ab76-fd151946e8fa" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Dec 04 15:39:37 crc kubenswrapper[4946]: E1204 15:39:37.940202 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c101b73-0dac-423b-a5d8-9f41625eafdd" containerName="registry-server"
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.940211 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c101b73-0dac-423b-a5d8-9f41625eafdd" containerName="registry-server"
Dec 04 15:39:37 crc kubenswrapper[4946]: E1204 15:39:37.940248 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c101b73-0dac-423b-a5d8-9f41625eafdd" containerName="extract-utilities"
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.940257 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c101b73-0dac-423b-a5d8-9f41625eafdd" containerName="extract-utilities"
Dec 04 15:39:37 crc kubenswrapper[4946]: E1204 15:39:37.940275 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c101b73-0dac-423b-a5d8-9f41625eafdd" containerName="extract-content"
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.940281 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c101b73-0dac-423b-a5d8-9f41625eafdd" containerName="extract-content"
Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.940503 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c768922-7c81-4021-ab76-fd151946e8fa" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
removing state" podUID="3c768922-7c81-4021-ab76-fd151946e8fa" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.940524 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c101b73-0dac-423b-a5d8-9f41625eafdd" containerName="registry-server" Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.941439 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-vxxvc" Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.944653 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.945340 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bhtcv" Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.947086 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.947221 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 15:39:37 crc kubenswrapper[4946]: I1204 15:39:37.974866 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-vxxvc"] Dec 04 15:39:38 crc kubenswrapper[4946]: I1204 15:39:38.140243 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/55c4138e-0212-42f5-a45c-52eead1474d3-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-vxxvc\" (UID: \"55c4138e-0212-42f5-a45c-52eead1474d3\") " pod="openstack/ssh-known-hosts-edpm-deployment-vxxvc" Dec 04 15:39:38 crc kubenswrapper[4946]: I1204 15:39:38.140355 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/55c4138e-0212-42f5-a45c-52eead1474d3-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-vxxvc\" (UID: \"55c4138e-0212-42f5-a45c-52eead1474d3\") " pod="openstack/ssh-known-hosts-edpm-deployment-vxxvc" Dec 04 15:39:38 crc kubenswrapper[4946]: I1204 15:39:38.140488 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6v2kj\" (UniqueName: \"kubernetes.io/projected/55c4138e-0212-42f5-a45c-52eead1474d3-kube-api-access-6v2kj\") pod \"ssh-known-hosts-edpm-deployment-vxxvc\" (UID: \"55c4138e-0212-42f5-a45c-52eead1474d3\") " pod="openstack/ssh-known-hosts-edpm-deployment-vxxvc" Dec 04 15:39:38 crc kubenswrapper[4946]: I1204 15:39:38.243698 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/55c4138e-0212-42f5-a45c-52eead1474d3-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-vxxvc\" (UID: \"55c4138e-0212-42f5-a45c-52eead1474d3\") " pod="openstack/ssh-known-hosts-edpm-deployment-vxxvc" Dec 04 15:39:38 crc kubenswrapper[4946]: I1204 15:39:38.243784 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/55c4138e-0212-42f5-a45c-52eead1474d3-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-vxxvc\" (UID: \"55c4138e-0212-42f5-a45c-52eead1474d3\") " pod="openstack/ssh-known-hosts-edpm-deployment-vxxvc" Dec 
04 15:39:38 crc kubenswrapper[4946]: I1204 15:39:38.243866 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6v2kj\" (UniqueName: \"kubernetes.io/projected/55c4138e-0212-42f5-a45c-52eead1474d3-kube-api-access-6v2kj\") pod \"ssh-known-hosts-edpm-deployment-vxxvc\" (UID: \"55c4138e-0212-42f5-a45c-52eead1474d3\") " pod="openstack/ssh-known-hosts-edpm-deployment-vxxvc" Dec 04 15:39:38 crc kubenswrapper[4946]: I1204 15:39:38.250197 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/55c4138e-0212-42f5-a45c-52eead1474d3-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-vxxvc\" (UID: \"55c4138e-0212-42f5-a45c-52eead1474d3\") " pod="openstack/ssh-known-hosts-edpm-deployment-vxxvc" Dec 04 15:39:38 crc kubenswrapper[4946]: I1204 15:39:38.255050 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/55c4138e-0212-42f5-a45c-52eead1474d3-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-vxxvc\" (UID: \"55c4138e-0212-42f5-a45c-52eead1474d3\") " pod="openstack/ssh-known-hosts-edpm-deployment-vxxvc" Dec 04 15:39:38 crc kubenswrapper[4946]: I1204 15:39:38.272028 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6v2kj\" (UniqueName: \"kubernetes.io/projected/55c4138e-0212-42f5-a45c-52eead1474d3-kube-api-access-6v2kj\") pod \"ssh-known-hosts-edpm-deployment-vxxvc\" (UID: \"55c4138e-0212-42f5-a45c-52eead1474d3\") " pod="openstack/ssh-known-hosts-edpm-deployment-vxxvc" Dec 04 15:39:38 crc kubenswrapper[4946]: I1204 15:39:38.570303 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-vxxvc" Dec 04 15:39:39 crc kubenswrapper[4946]: I1204 15:39:39.210680 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-vxxvc"] Dec 04 15:39:39 crc kubenswrapper[4946]: I1204 15:39:39.852458 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-vxxvc" event={"ID":"55c4138e-0212-42f5-a45c-52eead1474d3","Type":"ContainerStarted","Data":"2039c2ee5157229f3eb66aee734e4d43d7e87bfd1794f5b943fbfbfe6d551372"} Dec 04 15:39:40 crc kubenswrapper[4946]: I1204 15:39:40.869523 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-vxxvc" event={"ID":"55c4138e-0212-42f5-a45c-52eead1474d3","Type":"ContainerStarted","Data":"03ab6000203de4e3265b057b603d0b5afd647d701fc1f5218d125f756bb7dfb5"} Dec 04 15:39:40 crc kubenswrapper[4946]: I1204 15:39:40.916989 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-vxxvc" podStartSLOduration=3.4682342090000002 podStartE2EDuration="3.916961206s" podCreationTimestamp="2025-12-04 15:39:37 +0000 UTC" firstStartedPulling="2025-12-04 15:39:39.216207775 +0000 UTC m=+2230.102251416" lastFinishedPulling="2025-12-04 15:39:39.664934772 +0000 UTC m=+2230.550978413" observedRunningTime="2025-12-04 15:39:40.903736413 +0000 UTC m=+2231.789780094" watchObservedRunningTime="2025-12-04 15:39:40.916961206 +0000 UTC m=+2231.803004857" Dec 04 15:39:43 crc kubenswrapper[4946]: I1204 15:39:43.062043 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-sync-jkwmj"] Dec 04 15:39:43 crc kubenswrapper[4946]: I1204 15:39:43.076093 4946 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["openstack/cloudkitty-db-sync-jkwmj"] Dec 04 15:39:43 crc kubenswrapper[4946]: I1204 15:39:43.465679 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95ee854c-ccd7-4292-b874-9ce160fc8988" path="/var/lib/kubelet/pods/95ee854c-ccd7-4292-b874-9ce160fc8988/volumes" Dec 04 15:39:45 crc kubenswrapper[4946]: I1204 15:39:45.318250 4946 scope.go:117] "RemoveContainer" containerID="02920522b5698fb370a57eb9b29a1b82f48322d4c0d4df021001f2a9b13db7a9" Dec 04 15:39:47 crc kubenswrapper[4946]: I1204 15:39:47.961924 4946 generic.go:334] "Generic (PLEG): container finished" podID="55c4138e-0212-42f5-a45c-52eead1474d3" containerID="03ab6000203de4e3265b057b603d0b5afd647d701fc1f5218d125f756bb7dfb5" exitCode=0 Dec 04 15:39:47 crc kubenswrapper[4946]: I1204 15:39:47.962071 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-vxxvc" event={"ID":"55c4138e-0212-42f5-a45c-52eead1474d3","Type":"ContainerDied","Data":"03ab6000203de4e3265b057b603d0b5afd647d701fc1f5218d125f756bb7dfb5"} Dec 04 15:39:49 crc kubenswrapper[4946]: I1204 15:39:49.057205 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-storageinit-mfcpf"] Dec 04 15:39:49 crc kubenswrapper[4946]: I1204 15:39:49.070768 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-storageinit-mfcpf"] Dec 04 15:39:49 crc kubenswrapper[4946]: I1204 15:39:49.472399 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="534da337-2f9e-42db-b58d-48d43ca79b6d" path="/var/lib/kubelet/pods/534da337-2f9e-42db-b58d-48d43ca79b6d/volumes" Dec 04 15:39:49 crc kubenswrapper[4946]: I1204 15:39:49.580014 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-vxxvc" Dec 04 15:39:49 crc kubenswrapper[4946]: I1204 15:39:49.695915 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6v2kj\" (UniqueName: \"kubernetes.io/projected/55c4138e-0212-42f5-a45c-52eead1474d3-kube-api-access-6v2kj\") pod \"55c4138e-0212-42f5-a45c-52eead1474d3\" (UID: \"55c4138e-0212-42f5-a45c-52eead1474d3\") " Dec 04 15:39:49 crc kubenswrapper[4946]: I1204 15:39:49.696157 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/55c4138e-0212-42f5-a45c-52eead1474d3-ssh-key-openstack-edpm-ipam\") pod \"55c4138e-0212-42f5-a45c-52eead1474d3\" (UID: \"55c4138e-0212-42f5-a45c-52eead1474d3\") " Dec 04 15:39:49 crc kubenswrapper[4946]: I1204 15:39:49.696356 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/55c4138e-0212-42f5-a45c-52eead1474d3-inventory-0\") pod \"55c4138e-0212-42f5-a45c-52eead1474d3\" (UID: \"55c4138e-0212-42f5-a45c-52eead1474d3\") " Dec 04 15:39:49 crc kubenswrapper[4946]: I1204 15:39:49.705292 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55c4138e-0212-42f5-a45c-52eead1474d3-kube-api-access-6v2kj" (OuterVolumeSpecName: "kube-api-access-6v2kj") pod "55c4138e-0212-42f5-a45c-52eead1474d3" (UID: "55c4138e-0212-42f5-a45c-52eead1474d3"). InnerVolumeSpecName "kube-api-access-6v2kj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:39:49 crc kubenswrapper[4946]: I1204 15:39:49.734350 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55c4138e-0212-42f5-a45c-52eead1474d3-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "55c4138e-0212-42f5-a45c-52eead1474d3" (UID: "55c4138e-0212-42f5-a45c-52eead1474d3"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:39:49 crc kubenswrapper[4946]: I1204 15:39:49.753318 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55c4138e-0212-42f5-a45c-52eead1474d3-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "55c4138e-0212-42f5-a45c-52eead1474d3" (UID: "55c4138e-0212-42f5-a45c-52eead1474d3"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:39:49 crc kubenswrapper[4946]: I1204 15:39:49.799468 4946 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/55c4138e-0212-42f5-a45c-52eead1474d3-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:39:49 crc kubenswrapper[4946]: I1204 15:39:49.799520 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6v2kj\" (UniqueName: \"kubernetes.io/projected/55c4138e-0212-42f5-a45c-52eead1474d3-kube-api-access-6v2kj\") on node \"crc\" DevicePath \"\"" Dec 04 15:39:49 crc kubenswrapper[4946]: I1204 15:39:49.799538 4946 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/55c4138e-0212-42f5-a45c-52eead1474d3-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.000414 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-vxxvc" event={"ID":"55c4138e-0212-42f5-a45c-52eead1474d3","Type":"ContainerDied","Data":"2039c2ee5157229f3eb66aee734e4d43d7e87bfd1794f5b943fbfbfe6d551372"} Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.000985 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2039c2ee5157229f3eb66aee734e4d43d7e87bfd1794f5b943fbfbfe6d551372" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.000517 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-vxxvc" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.103563 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc"] Dec 04 15:39:50 crc kubenswrapper[4946]: E1204 15:39:50.104694 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55c4138e-0212-42f5-a45c-52eead1474d3" containerName="ssh-known-hosts-edpm-deployment" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.104726 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="55c4138e-0212-42f5-a45c-52eead1474d3" containerName="ssh-known-hosts-edpm-deployment" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.105314 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="55c4138e-0212-42f5-a45c-52eead1474d3" containerName="ssh-known-hosts-edpm-deployment" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.106959 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.110073 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.110389 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.110476 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.112992 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bhtcv" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.118806 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc"] Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.210013 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnssh\" (UniqueName: \"kubernetes.io/projected/e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207-kube-api-access-qnssh\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dztmc\" (UID: \"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.210155 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dztmc\" (UID: \"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.210228 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dztmc\" (UID: \"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.312790 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnssh\" (UniqueName: \"kubernetes.io/projected/e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207-kube-api-access-qnssh\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dztmc\" (UID: \"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.312913 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dztmc\" (UID: \"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.313014 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dztmc\" (UID: \"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.318510 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dztmc\" (UID: \"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.319640 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dztmc\" (UID: \"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.340663 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnssh\" (UniqueName: \"kubernetes.io/projected/e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207-kube-api-access-qnssh\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dztmc\" (UID: \"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc" Dec 04 15:39:50 crc kubenswrapper[4946]: I1204 15:39:50.447840 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc" Dec 04 15:39:51 crc kubenswrapper[4946]: I1204 15:39:51.044809 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc"] Dec 04 15:39:52 crc kubenswrapper[4946]: I1204 15:39:52.086785 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc" event={"ID":"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207","Type":"ContainerStarted","Data":"cec802280e52912beab58e1176e6d883ca927508ecf2afaa64d2d8bd581b7c45"} Dec 04 15:39:52 crc kubenswrapper[4946]: I1204 15:39:52.088744 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc" event={"ID":"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207","Type":"ContainerStarted","Data":"c1405b624a0fa42ea203f317d585005038e9debd3613f19ba89634b45d2d405a"} Dec 04 15:40:01 crc kubenswrapper[4946]: I1204 15:40:01.223164 4946 generic.go:334] "Generic (PLEG): container finished" podID="e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207" containerID="cec802280e52912beab58e1176e6d883ca927508ecf2afaa64d2d8bd581b7c45" exitCode=0 Dec 04 15:40:01 crc kubenswrapper[4946]: I1204 15:40:01.223255 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc" event={"ID":"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207","Type":"ContainerDied","Data":"cec802280e52912beab58e1176e6d883ca927508ecf2afaa64d2d8bd581b7c45"} Dec 04 15:40:02 crc kubenswrapper[4946]: I1204 15:40:02.753326 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc" Dec 04 15:40:02 crc kubenswrapper[4946]: I1204 15:40:02.899562 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnssh\" (UniqueName: \"kubernetes.io/projected/e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207-kube-api-access-qnssh\") pod \"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207\" (UID: \"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207\") " Dec 04 15:40:02 crc kubenswrapper[4946]: I1204 15:40:02.899817 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207-ssh-key\") pod \"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207\" (UID: \"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207\") " Dec 04 15:40:02 crc kubenswrapper[4946]: I1204 15:40:02.899912 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207-inventory\") pod \"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207\" (UID: \"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207\") " Dec 04 15:40:02 crc kubenswrapper[4946]: I1204 15:40:02.909039 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207-kube-api-access-qnssh" (OuterVolumeSpecName: "kube-api-access-qnssh") pod "e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207" (UID: "e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207"). InnerVolumeSpecName "kube-api-access-qnssh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:40:02 crc kubenswrapper[4946]: I1204 15:40:02.937688 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207" (UID: "e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:40:02 crc kubenswrapper[4946]: I1204 15:40:02.965194 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207-inventory" (OuterVolumeSpecName: "inventory") pod "e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207" (UID: "e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.003976 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnssh\" (UniqueName: \"kubernetes.io/projected/e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207-kube-api-access-qnssh\") on node \"crc\" DevicePath \"\"" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.004031 4946 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.004052 4946 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.254972 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc" event={"ID":"e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207","Type":"ContainerDied","Data":"c1405b624a0fa42ea203f317d585005038e9debd3613f19ba89634b45d2d405a"} Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.255027 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1405b624a0fa42ea203f317d585005038e9debd3613f19ba89634b45d2d405a" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.255060 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dztmc" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.372278 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc"] Dec 04 15:40:03 crc kubenswrapper[4946]: E1204 15:40:03.373204 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.373237 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.373528 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.374640 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.385976 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.388469 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bhtcv" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.388507 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.388729 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.391037 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc"] Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.517684 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qv52b\" (UniqueName: \"kubernetes.io/projected/c93b77be-2594-456e-a0fc-0a73d3bc6a0b-kube-api-access-qv52b\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc\" (UID: \"c93b77be-2594-456e-a0fc-0a73d3bc6a0b\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.517786 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c93b77be-2594-456e-a0fc-0a73d3bc6a0b-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc\" (UID: \"c93b77be-2594-456e-a0fc-0a73d3bc6a0b\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.517826 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c93b77be-2594-456e-a0fc-0a73d3bc6a0b-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc\" (UID: \"c93b77be-2594-456e-a0fc-0a73d3bc6a0b\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.622224 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qv52b\" (UniqueName: \"kubernetes.io/projected/c93b77be-2594-456e-a0fc-0a73d3bc6a0b-kube-api-access-qv52b\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc\" (UID: \"c93b77be-2594-456e-a0fc-0a73d3bc6a0b\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.622364 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c93b77be-2594-456e-a0fc-0a73d3bc6a0b-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc\" (UID: \"c93b77be-2594-456e-a0fc-0a73d3bc6a0b\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.622413 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c93b77be-2594-456e-a0fc-0a73d3bc6a0b-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc\" (UID: 
\"c93b77be-2594-456e-a0fc-0a73d3bc6a0b\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.630286 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c93b77be-2594-456e-a0fc-0a73d3bc6a0b-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc\" (UID: \"c93b77be-2594-456e-a0fc-0a73d3bc6a0b\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.634989 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c93b77be-2594-456e-a0fc-0a73d3bc6a0b-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc\" (UID: \"c93b77be-2594-456e-a0fc-0a73d3bc6a0b\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.650987 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qv52b\" (UniqueName: \"kubernetes.io/projected/c93b77be-2594-456e-a0fc-0a73d3bc6a0b-kube-api-access-qv52b\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc\" (UID: \"c93b77be-2594-456e-a0fc-0a73d3bc6a0b\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc" Dec 04 15:40:03 crc kubenswrapper[4946]: I1204 15:40:03.698036 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc" Dec 04 15:40:04 crc kubenswrapper[4946]: I1204 15:40:04.402778 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc"] Dec 04 15:40:05 crc kubenswrapper[4946]: I1204 15:40:05.291667 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc" event={"ID":"c93b77be-2594-456e-a0fc-0a73d3bc6a0b","Type":"ContainerStarted","Data":"ec7212e82a1e7f56614c013f1e351f42c153694b0e85ee6ff21c0c755c20f60c"} Dec 04 15:40:05 crc kubenswrapper[4946]: I1204 15:40:05.292297 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc" event={"ID":"c93b77be-2594-456e-a0fc-0a73d3bc6a0b","Type":"ContainerStarted","Data":"41ee13c3cefd9ed2d4fb44eadde631002b41dfc3c193a8fcc2d3b7ea92f034c7"} Dec 04 15:40:05 crc kubenswrapper[4946]: I1204 15:40:05.320314 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc" podStartSLOduration=1.8766459549999999 podStartE2EDuration="2.320289686s" podCreationTimestamp="2025-12-04 15:40:03 +0000 UTC" firstStartedPulling="2025-12-04 15:40:04.414385436 +0000 UTC m=+2255.300429087" lastFinishedPulling="2025-12-04 15:40:04.858029137 +0000 UTC m=+2255.744072818" observedRunningTime="2025-12-04 15:40:05.314409449 +0000 UTC m=+2256.200453110" watchObservedRunningTime="2025-12-04 15:40:05.320289686 +0000 UTC m=+2256.206333327" Dec 04 15:40:06 crc kubenswrapper[4946]: I1204 15:40:06.604748 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5vkcp"] Dec 04 15:40:06 crc kubenswrapper[4946]: I1204 15:40:06.608431 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5vkcp" Dec 04 15:40:06 crc kubenswrapper[4946]: I1204 15:40:06.622562 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5vkcp"] Dec 04 15:40:06 crc kubenswrapper[4946]: I1204 15:40:06.709911 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6e570b6-5a79-4f57-ab2a-abb17c2fab62-catalog-content\") pod \"certified-operators-5vkcp\" (UID: \"c6e570b6-5a79-4f57-ab2a-abb17c2fab62\") " pod="openshift-marketplace/certified-operators-5vkcp" Dec 04 15:40:06 crc kubenswrapper[4946]: I1204 15:40:06.709965 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-965bh\" (UniqueName: \"kubernetes.io/projected/c6e570b6-5a79-4f57-ab2a-abb17c2fab62-kube-api-access-965bh\") pod \"certified-operators-5vkcp\" (UID: \"c6e570b6-5a79-4f57-ab2a-abb17c2fab62\") " pod="openshift-marketplace/certified-operators-5vkcp" Dec 04 15:40:06 crc kubenswrapper[4946]: I1204 15:40:06.710024 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6e570b6-5a79-4f57-ab2a-abb17c2fab62-utilities\") pod \"certified-operators-5vkcp\" (UID: \"c6e570b6-5a79-4f57-ab2a-abb17c2fab62\") " pod="openshift-marketplace/certified-operators-5vkcp" Dec 04 15:40:06 crc kubenswrapper[4946]: I1204 15:40:06.812716 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6e570b6-5a79-4f57-ab2a-abb17c2fab62-catalog-content\") pod \"certified-operators-5vkcp\" (UID: \"c6e570b6-5a79-4f57-ab2a-abb17c2fab62\") " pod="openshift-marketplace/certified-operators-5vkcp" Dec 04 15:40:06 crc kubenswrapper[4946]: I1204 15:40:06.812777 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-965bh\" (UniqueName: \"kubernetes.io/projected/c6e570b6-5a79-4f57-ab2a-abb17c2fab62-kube-api-access-965bh\") pod \"certified-operators-5vkcp\" (UID: \"c6e570b6-5a79-4f57-ab2a-abb17c2fab62\") " pod="openshift-marketplace/certified-operators-5vkcp" Dec 04 15:40:06 crc kubenswrapper[4946]: I1204 15:40:06.812829 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6e570b6-5a79-4f57-ab2a-abb17c2fab62-utilities\") pod \"certified-operators-5vkcp\" (UID: \"c6e570b6-5a79-4f57-ab2a-abb17c2fab62\") " pod="openshift-marketplace/certified-operators-5vkcp" Dec 04 15:40:06 crc kubenswrapper[4946]: I1204 15:40:06.813523 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6e570b6-5a79-4f57-ab2a-abb17c2fab62-utilities\") pod \"certified-operators-5vkcp\" (UID: \"c6e570b6-5a79-4f57-ab2a-abb17c2fab62\") " pod="openshift-marketplace/certified-operators-5vkcp" Dec 04 15:40:06 crc kubenswrapper[4946]: I1204 15:40:06.813824 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6e570b6-5a79-4f57-ab2a-abb17c2fab62-catalog-content\") pod \"certified-operators-5vkcp\" (UID: \"c6e570b6-5a79-4f57-ab2a-abb17c2fab62\") " pod="openshift-marketplace/certified-operators-5vkcp" Dec 04 15:40:06 crc kubenswrapper[4946]: I1204 15:40:06.842453 4946 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-965bh\" (UniqueName: \"kubernetes.io/projected/c6e570b6-5a79-4f57-ab2a-abb17c2fab62-kube-api-access-965bh\") pod \"certified-operators-5vkcp\" (UID: \"c6e570b6-5a79-4f57-ab2a-abb17c2fab62\") " pod="openshift-marketplace/certified-operators-5vkcp" Dec 04 15:40:06 crc kubenswrapper[4946]: I1204 15:40:06.946662 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5vkcp" Dec 04 15:40:07 crc kubenswrapper[4946]: I1204 15:40:07.578779 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5vkcp"] Dec 04 15:40:08 crc kubenswrapper[4946]: I1204 15:40:08.333784 4946 generic.go:334] "Generic (PLEG): container finished" podID="c6e570b6-5a79-4f57-ab2a-abb17c2fab62" containerID="c545418f47157f53a11f3d05d146a3fb22706066f3f249adb6ce0c633aa1c8ca" exitCode=0 Dec 04 15:40:08 crc kubenswrapper[4946]: I1204 15:40:08.333873 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5vkcp" event={"ID":"c6e570b6-5a79-4f57-ab2a-abb17c2fab62","Type":"ContainerDied","Data":"c545418f47157f53a11f3d05d146a3fb22706066f3f249adb6ce0c633aa1c8ca"} Dec 04 15:40:08 crc kubenswrapper[4946]: I1204 15:40:08.335868 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5vkcp" event={"ID":"c6e570b6-5a79-4f57-ab2a-abb17c2fab62","Type":"ContainerStarted","Data":"7c8700879909b1b88464bc0a30e8bf19778f7c9a57747ee7d45421b4f2c6492a"} Dec 04 15:40:09 crc kubenswrapper[4946]: I1204 15:40:09.350443 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5vkcp" event={"ID":"c6e570b6-5a79-4f57-ab2a-abb17c2fab62","Type":"ContainerStarted","Data":"be48d1593219d3365bc4751fc80a4169082a55dc320ef094accb68aea56d68a8"} Dec 04 15:40:11 crc kubenswrapper[4946]: I1204 15:40:11.382352 4946 generic.go:334] "Generic (PLEG): container finished" podID="c6e570b6-5a79-4f57-ab2a-abb17c2fab62" containerID="be48d1593219d3365bc4751fc80a4169082a55dc320ef094accb68aea56d68a8" exitCode=0 Dec 04 15:40:11 crc kubenswrapper[4946]: I1204 15:40:11.382453 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5vkcp" event={"ID":"c6e570b6-5a79-4f57-ab2a-abb17c2fab62","Type":"ContainerDied","Data":"be48d1593219d3365bc4751fc80a4169082a55dc320ef094accb68aea56d68a8"} Dec 04 15:40:12 crc kubenswrapper[4946]: I1204 15:40:12.409427 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5vkcp" event={"ID":"c6e570b6-5a79-4f57-ab2a-abb17c2fab62","Type":"ContainerStarted","Data":"4953c76807e63a3d81a3a8a150f41ff918ce3265ae2526b2292071794e5f2625"} Dec 04 15:40:12 crc kubenswrapper[4946]: I1204 15:40:12.445738 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5vkcp" podStartSLOduration=2.983168252 podStartE2EDuration="6.445714985s" podCreationTimestamp="2025-12-04 15:40:06 +0000 UTC" firstStartedPulling="2025-12-04 15:40:08.336767854 +0000 UTC m=+2259.222811495" lastFinishedPulling="2025-12-04 15:40:11.799314567 +0000 UTC m=+2262.685358228" observedRunningTime="2025-12-04 15:40:12.435477431 +0000 UTC m=+2263.321521062" watchObservedRunningTime="2025-12-04 15:40:12.445714985 +0000 UTC m=+2263.331758626" Dec 04 15:40:16 crc kubenswrapper[4946]: I1204 15:40:16.458890 4946 generic.go:334] "Generic (PLEG): container finished" 
podID="c93b77be-2594-456e-a0fc-0a73d3bc6a0b" containerID="ec7212e82a1e7f56614c013f1e351f42c153694b0e85ee6ff21c0c755c20f60c" exitCode=0 Dec 04 15:40:16 crc kubenswrapper[4946]: I1204 15:40:16.459031 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc" event={"ID":"c93b77be-2594-456e-a0fc-0a73d3bc6a0b","Type":"ContainerDied","Data":"ec7212e82a1e7f56614c013f1e351f42c153694b0e85ee6ff21c0c755c20f60c"} Dec 04 15:40:16 crc kubenswrapper[4946]: I1204 15:40:16.947395 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5vkcp" Dec 04 15:40:16 crc kubenswrapper[4946]: I1204 15:40:16.947482 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5vkcp" Dec 04 15:40:17 crc kubenswrapper[4946]: I1204 15:40:17.033194 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5vkcp" Dec 04 15:40:17 crc kubenswrapper[4946]: I1204 15:40:17.595615 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5vkcp" Dec 04 15:40:17 crc kubenswrapper[4946]: I1204 15:40:17.696440 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5vkcp"] Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.058549 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.127496 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c93b77be-2594-456e-a0fc-0a73d3bc6a0b-ssh-key\") pod \"c93b77be-2594-456e-a0fc-0a73d3bc6a0b\" (UID: \"c93b77be-2594-456e-a0fc-0a73d3bc6a0b\") " Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.127626 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c93b77be-2594-456e-a0fc-0a73d3bc6a0b-inventory\") pod \"c93b77be-2594-456e-a0fc-0a73d3bc6a0b\" (UID: \"c93b77be-2594-456e-a0fc-0a73d3bc6a0b\") " Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.127844 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qv52b\" (UniqueName: \"kubernetes.io/projected/c93b77be-2594-456e-a0fc-0a73d3bc6a0b-kube-api-access-qv52b\") pod \"c93b77be-2594-456e-a0fc-0a73d3bc6a0b\" (UID: \"c93b77be-2594-456e-a0fc-0a73d3bc6a0b\") " Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.138260 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c93b77be-2594-456e-a0fc-0a73d3bc6a0b-kube-api-access-qv52b" (OuterVolumeSpecName: "kube-api-access-qv52b") pod "c93b77be-2594-456e-a0fc-0a73d3bc6a0b" (UID: "c93b77be-2594-456e-a0fc-0a73d3bc6a0b"). InnerVolumeSpecName "kube-api-access-qv52b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.167597 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c93b77be-2594-456e-a0fc-0a73d3bc6a0b-inventory" (OuterVolumeSpecName: "inventory") pod "c93b77be-2594-456e-a0fc-0a73d3bc6a0b" (UID: "c93b77be-2594-456e-a0fc-0a73d3bc6a0b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.169075 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c93b77be-2594-456e-a0fc-0a73d3bc6a0b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c93b77be-2594-456e-a0fc-0a73d3bc6a0b" (UID: "c93b77be-2594-456e-a0fc-0a73d3bc6a0b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.230983 4946 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c93b77be-2594-456e-a0fc-0a73d3bc6a0b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.231410 4946 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c93b77be-2594-456e-a0fc-0a73d3bc6a0b-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.231422 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qv52b\" (UniqueName: \"kubernetes.io/projected/c93b77be-2594-456e-a0fc-0a73d3bc6a0b-kube-api-access-qv52b\") on node \"crc\" DevicePath \"\"" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.495878 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.495845 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc" event={"ID":"c93b77be-2594-456e-a0fc-0a73d3bc6a0b","Type":"ContainerDied","Data":"41ee13c3cefd9ed2d4fb44eadde631002b41dfc3c193a8fcc2d3b7ea92f034c7"} Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.496005 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41ee13c3cefd9ed2d4fb44eadde631002b41dfc3c193a8fcc2d3b7ea92f034c7" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.589310 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587"] Dec 04 15:40:18 crc kubenswrapper[4946]: E1204 15:40:18.589867 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c93b77be-2594-456e-a0fc-0a73d3bc6a0b" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.589892 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c93b77be-2594-456e-a0fc-0a73d3bc6a0b" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.590228 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="c93b77be-2594-456e-a0fc-0a73d3bc6a0b" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.591341 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.599485 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.600214 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.600275 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.600413 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.600953 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.606050 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587"] Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.607053 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bhtcv" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.608079 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.608325 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.641937 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdjc9\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-kube-api-access-cdjc9\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.642047 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.642097 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.642447 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-nova-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.642674 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.642708 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.642741 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.642802 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.642850 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.642930 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.643006 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.643143 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.643197 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.643231 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.745438 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.745548 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.745622 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.745663 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.745692 4946 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.745721 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdjc9\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-kube-api-access-cdjc9\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.745776 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.745805 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.745890 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.746201 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.746358 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.746438 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: 
\"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.746513 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.746564 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.751029 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.751130 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.751606 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.753099 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.753207 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.753319 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.753446 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.754079 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.754526 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.759568 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.761655 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.762286 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.774382 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc 
kubenswrapper[4946]: I1204 15:40:18.777542 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdjc9\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-kube-api-access-cdjc9\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-bn587\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:18 crc kubenswrapper[4946]: I1204 15:40:18.947825 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:40:19 crc kubenswrapper[4946]: I1204 15:40:19.505470 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5vkcp" podUID="c6e570b6-5a79-4f57-ab2a-abb17c2fab62" containerName="registry-server" containerID="cri-o://4953c76807e63a3d81a3a8a150f41ff918ce3265ae2526b2292071794e5f2625" gracePeriod=2 Dec 04 15:40:19 crc kubenswrapper[4946]: I1204 15:40:19.692789 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587"] Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.353318 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5vkcp" Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.494024 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6e570b6-5a79-4f57-ab2a-abb17c2fab62-utilities\") pod \"c6e570b6-5a79-4f57-ab2a-abb17c2fab62\" (UID: \"c6e570b6-5a79-4f57-ab2a-abb17c2fab62\") " Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.494300 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6e570b6-5a79-4f57-ab2a-abb17c2fab62-catalog-content\") pod \"c6e570b6-5a79-4f57-ab2a-abb17c2fab62\" (UID: \"c6e570b6-5a79-4f57-ab2a-abb17c2fab62\") " Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.495268 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-965bh\" (UniqueName: \"kubernetes.io/projected/c6e570b6-5a79-4f57-ab2a-abb17c2fab62-kube-api-access-965bh\") pod \"c6e570b6-5a79-4f57-ab2a-abb17c2fab62\" (UID: \"c6e570b6-5a79-4f57-ab2a-abb17c2fab62\") " Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.496218 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6e570b6-5a79-4f57-ab2a-abb17c2fab62-utilities" (OuterVolumeSpecName: "utilities") pod "c6e570b6-5a79-4f57-ab2a-abb17c2fab62" (UID: "c6e570b6-5a79-4f57-ab2a-abb17c2fab62"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.502891 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6e570b6-5a79-4f57-ab2a-abb17c2fab62-kube-api-access-965bh" (OuterVolumeSpecName: "kube-api-access-965bh") pod "c6e570b6-5a79-4f57-ab2a-abb17c2fab62" (UID: "c6e570b6-5a79-4f57-ab2a-abb17c2fab62"). InnerVolumeSpecName "kube-api-access-965bh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.527288 4946 generic.go:334] "Generic (PLEG): container finished" podID="c6e570b6-5a79-4f57-ab2a-abb17c2fab62" containerID="4953c76807e63a3d81a3a8a150f41ff918ce3265ae2526b2292071794e5f2625" exitCode=0 Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.527415 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5vkcp" event={"ID":"c6e570b6-5a79-4f57-ab2a-abb17c2fab62","Type":"ContainerDied","Data":"4953c76807e63a3d81a3a8a150f41ff918ce3265ae2526b2292071794e5f2625"} Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.527469 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5vkcp" event={"ID":"c6e570b6-5a79-4f57-ab2a-abb17c2fab62","Type":"ContainerDied","Data":"7c8700879909b1b88464bc0a30e8bf19778f7c9a57747ee7d45421b4f2c6492a"} Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.527505 4946 scope.go:117] "RemoveContainer" containerID="4953c76807e63a3d81a3a8a150f41ff918ce3265ae2526b2292071794e5f2625" Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.527577 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5vkcp" Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.532145 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" event={"ID":"f35809fc-31b6-4c6b-a652-928ed15e187e","Type":"ContainerStarted","Data":"3084902decd2fca856d93120f207caa3d229b284e4b1149e995f1270e1e88f61"} Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.532489 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" event={"ID":"f35809fc-31b6-4c6b-a652-928ed15e187e","Type":"ContainerStarted","Data":"9e446f6ef66226e73d1336add8936bbad2e765a26a83285761a722fccb7dd5a7"} Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.547666 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6e570b6-5a79-4f57-ab2a-abb17c2fab62-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c6e570b6-5a79-4f57-ab2a-abb17c2fab62" (UID: "c6e570b6-5a79-4f57-ab2a-abb17c2fab62"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.563619 4946 scope.go:117] "RemoveContainer" containerID="be48d1593219d3365bc4751fc80a4169082a55dc320ef094accb68aea56d68a8" Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.577140 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" podStartSLOduration=2.100665427 podStartE2EDuration="2.577099154s" podCreationTimestamp="2025-12-04 15:40:18 +0000 UTC" firstStartedPulling="2025-12-04 15:40:19.696563804 +0000 UTC m=+2270.582607435" lastFinishedPulling="2025-12-04 15:40:20.172997501 +0000 UTC m=+2271.059041162" observedRunningTime="2025-12-04 15:40:20.563180543 +0000 UTC m=+2271.449224204" watchObservedRunningTime="2025-12-04 15:40:20.577099154 +0000 UTC m=+2271.463142795" Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.601054 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-965bh\" (UniqueName: \"kubernetes.io/projected/c6e570b6-5a79-4f57-ab2a-abb17c2fab62-kube-api-access-965bh\") on node \"crc\" DevicePath \"\"" Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.601100 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6e570b6-5a79-4f57-ab2a-abb17c2fab62-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.601190 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6e570b6-5a79-4f57-ab2a-abb17c2fab62-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.604043 4946 scope.go:117] "RemoveContainer" containerID="c545418f47157f53a11f3d05d146a3fb22706066f3f249adb6ce0c633aa1c8ca" Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.637696 4946 scope.go:117] "RemoveContainer" containerID="4953c76807e63a3d81a3a8a150f41ff918ce3265ae2526b2292071794e5f2625" Dec 04 15:40:20 crc kubenswrapper[4946]: E1204 15:40:20.638709 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4953c76807e63a3d81a3a8a150f41ff918ce3265ae2526b2292071794e5f2625\": container with ID starting with 4953c76807e63a3d81a3a8a150f41ff918ce3265ae2526b2292071794e5f2625 not found: ID does not exist" containerID="4953c76807e63a3d81a3a8a150f41ff918ce3265ae2526b2292071794e5f2625" Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.638763 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4953c76807e63a3d81a3a8a150f41ff918ce3265ae2526b2292071794e5f2625"} err="failed to get container status \"4953c76807e63a3d81a3a8a150f41ff918ce3265ae2526b2292071794e5f2625\": rpc error: code = NotFound desc = could not find container \"4953c76807e63a3d81a3a8a150f41ff918ce3265ae2526b2292071794e5f2625\": container with ID starting with 4953c76807e63a3d81a3a8a150f41ff918ce3265ae2526b2292071794e5f2625 not found: ID does not exist" Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.638808 4946 scope.go:117] "RemoveContainer" containerID="be48d1593219d3365bc4751fc80a4169082a55dc320ef094accb68aea56d68a8" Dec 04 15:40:20 crc kubenswrapper[4946]: E1204 15:40:20.639347 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be48d1593219d3365bc4751fc80a4169082a55dc320ef094accb68aea56d68a8\": 
container with ID starting with be48d1593219d3365bc4751fc80a4169082a55dc320ef094accb68aea56d68a8 not found: ID does not exist" containerID="be48d1593219d3365bc4751fc80a4169082a55dc320ef094accb68aea56d68a8" Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.639391 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be48d1593219d3365bc4751fc80a4169082a55dc320ef094accb68aea56d68a8"} err="failed to get container status \"be48d1593219d3365bc4751fc80a4169082a55dc320ef094accb68aea56d68a8\": rpc error: code = NotFound desc = could not find container \"be48d1593219d3365bc4751fc80a4169082a55dc320ef094accb68aea56d68a8\": container with ID starting with be48d1593219d3365bc4751fc80a4169082a55dc320ef094accb68aea56d68a8 not found: ID does not exist" Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.639423 4946 scope.go:117] "RemoveContainer" containerID="c545418f47157f53a11f3d05d146a3fb22706066f3f249adb6ce0c633aa1c8ca" Dec 04 15:40:20 crc kubenswrapper[4946]: E1204 15:40:20.640243 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c545418f47157f53a11f3d05d146a3fb22706066f3f249adb6ce0c633aa1c8ca\": container with ID starting with c545418f47157f53a11f3d05d146a3fb22706066f3f249adb6ce0c633aa1c8ca not found: ID does not exist" containerID="c545418f47157f53a11f3d05d146a3fb22706066f3f249adb6ce0c633aa1c8ca" Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.640320 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c545418f47157f53a11f3d05d146a3fb22706066f3f249adb6ce0c633aa1c8ca"} err="failed to get container status \"c545418f47157f53a11f3d05d146a3fb22706066f3f249adb6ce0c633aa1c8ca\": rpc error: code = NotFound desc = could not find container \"c545418f47157f53a11f3d05d146a3fb22706066f3f249adb6ce0c633aa1c8ca\": container with ID starting with c545418f47157f53a11f3d05d146a3fb22706066f3f249adb6ce0c633aa1c8ca not found: ID does not exist" Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.872178 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5vkcp"] Dec 04 15:40:20 crc kubenswrapper[4946]: I1204 15:40:20.879363 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5vkcp"] Dec 04 15:40:21 crc kubenswrapper[4946]: I1204 15:40:21.476031 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6e570b6-5a79-4f57-ab2a-abb17c2fab62" path="/var/lib/kubelet/pods/c6e570b6-5a79-4f57-ab2a-abb17c2fab62/volumes" Dec 04 15:40:22 crc kubenswrapper[4946]: I1204 15:40:22.478109 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:40:22 crc kubenswrapper[4946]: I1204 15:40:22.478631 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:40:28 crc kubenswrapper[4946]: I1204 15:40:28.700503 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-p6lq5"] Dec 
04 15:40:28 crc kubenswrapper[4946]: E1204 15:40:28.702412 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6e570b6-5a79-4f57-ab2a-abb17c2fab62" containerName="extract-content" Dec 04 15:40:28 crc kubenswrapper[4946]: I1204 15:40:28.702430 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6e570b6-5a79-4f57-ab2a-abb17c2fab62" containerName="extract-content" Dec 04 15:40:28 crc kubenswrapper[4946]: E1204 15:40:28.702510 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6e570b6-5a79-4f57-ab2a-abb17c2fab62" containerName="registry-server" Dec 04 15:40:28 crc kubenswrapper[4946]: I1204 15:40:28.702518 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6e570b6-5a79-4f57-ab2a-abb17c2fab62" containerName="registry-server" Dec 04 15:40:28 crc kubenswrapper[4946]: E1204 15:40:28.702581 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6e570b6-5a79-4f57-ab2a-abb17c2fab62" containerName="extract-utilities" Dec 04 15:40:28 crc kubenswrapper[4946]: I1204 15:40:28.702590 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6e570b6-5a79-4f57-ab2a-abb17c2fab62" containerName="extract-utilities" Dec 04 15:40:28 crc kubenswrapper[4946]: I1204 15:40:28.702946 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6e570b6-5a79-4f57-ab2a-abb17c2fab62" containerName="registry-server" Dec 04 15:40:28 crc kubenswrapper[4946]: I1204 15:40:28.705575 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p6lq5" Dec 04 15:40:28 crc kubenswrapper[4946]: I1204 15:40:28.715396 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p6lq5"] Dec 04 15:40:28 crc kubenswrapper[4946]: I1204 15:40:28.757486 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f-catalog-content\") pod \"community-operators-p6lq5\" (UID: \"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f\") " pod="openshift-marketplace/community-operators-p6lq5" Dec 04 15:40:28 crc kubenswrapper[4946]: I1204 15:40:28.757586 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f-utilities\") pod \"community-operators-p6lq5\" (UID: \"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f\") " pod="openshift-marketplace/community-operators-p6lq5" Dec 04 15:40:28 crc kubenswrapper[4946]: I1204 15:40:28.757662 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpg4p\" (UniqueName: \"kubernetes.io/projected/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f-kube-api-access-jpg4p\") pod \"community-operators-p6lq5\" (UID: \"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f\") " pod="openshift-marketplace/community-operators-p6lq5" Dec 04 15:40:28 crc kubenswrapper[4946]: I1204 15:40:28.859609 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f-catalog-content\") pod \"community-operators-p6lq5\" (UID: \"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f\") " pod="openshift-marketplace/community-operators-p6lq5" Dec 04 15:40:28 crc kubenswrapper[4946]: I1204 15:40:28.859686 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f-utilities\") pod \"community-operators-p6lq5\" (UID: \"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f\") " pod="openshift-marketplace/community-operators-p6lq5" Dec 04 15:40:28 crc kubenswrapper[4946]: I1204 15:40:28.859759 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpg4p\" (UniqueName: \"kubernetes.io/projected/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f-kube-api-access-jpg4p\") pod \"community-operators-p6lq5\" (UID: \"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f\") " pod="openshift-marketplace/community-operators-p6lq5" Dec 04 15:40:28 crc kubenswrapper[4946]: I1204 15:40:28.860305 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f-catalog-content\") pod \"community-operators-p6lq5\" (UID: \"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f\") " pod="openshift-marketplace/community-operators-p6lq5" Dec 04 15:40:28 crc kubenswrapper[4946]: I1204 15:40:28.860434 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f-utilities\") pod \"community-operators-p6lq5\" (UID: \"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f\") " pod="openshift-marketplace/community-operators-p6lq5" Dec 04 15:40:28 crc kubenswrapper[4946]: I1204 15:40:28.894692 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpg4p\" (UniqueName: \"kubernetes.io/projected/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f-kube-api-access-jpg4p\") pod \"community-operators-p6lq5\" (UID: \"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f\") " pod="openshift-marketplace/community-operators-p6lq5" Dec 04 15:40:29 crc kubenswrapper[4946]: I1204 15:40:29.036245 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-p6lq5" Dec 04 15:40:29 crc kubenswrapper[4946]: I1204 15:40:29.635409 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p6lq5"] Dec 04 15:40:29 crc kubenswrapper[4946]: I1204 15:40:29.690482 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p6lq5" event={"ID":"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f","Type":"ContainerStarted","Data":"d5e1e98162fdebf4c37e3ae7fa0e4c31d1e2e5077b79c1d2de7097b44defac91"} Dec 04 15:40:30 crc kubenswrapper[4946]: I1204 15:40:30.703765 4946 generic.go:334] "Generic (PLEG): container finished" podID="b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f" containerID="48b026130ac644f7fe738e26b6a91593f8874ff6793fb61d83d2b11f668caa82" exitCode=0 Dec 04 15:40:30 crc kubenswrapper[4946]: I1204 15:40:30.703921 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p6lq5" event={"ID":"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f","Type":"ContainerDied","Data":"48b026130ac644f7fe738e26b6a91593f8874ff6793fb61d83d2b11f668caa82"} Dec 04 15:40:32 crc kubenswrapper[4946]: I1204 15:40:32.747717 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p6lq5" event={"ID":"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f","Type":"ContainerStarted","Data":"e05261b70a2ccee4e3c2fd875aa0fcea4ab7a1219f7c68c6ae3fa4957987cdb4"} Dec 04 15:40:33 crc kubenswrapper[4946]: I1204 15:40:33.765416 4946 generic.go:334] "Generic (PLEG): container finished" podID="b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f" containerID="e05261b70a2ccee4e3c2fd875aa0fcea4ab7a1219f7c68c6ae3fa4957987cdb4" exitCode=0 Dec 04 15:40:33 crc kubenswrapper[4946]: I1204 15:40:33.765544 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p6lq5" event={"ID":"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f","Type":"ContainerDied","Data":"e05261b70a2ccee4e3c2fd875aa0fcea4ab7a1219f7c68c6ae3fa4957987cdb4"} Dec 04 15:40:34 crc kubenswrapper[4946]: I1204 15:40:34.782024 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p6lq5" event={"ID":"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f","Type":"ContainerStarted","Data":"ec3c792a437b6956d87a4fd674427069f9d3cb86f62770c0736ce10584d9b9ef"} Dec 04 15:40:34 crc kubenswrapper[4946]: I1204 15:40:34.832718 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-p6lq5" podStartSLOduration=3.303480404 podStartE2EDuration="6.83269115s" podCreationTimestamp="2025-12-04 15:40:28 +0000 UTC" firstStartedPulling="2025-12-04 15:40:30.709494318 +0000 UTC m=+2281.595537999" lastFinishedPulling="2025-12-04 15:40:34.238705064 +0000 UTC m=+2285.124748745" observedRunningTime="2025-12-04 15:40:34.814964076 +0000 UTC m=+2285.701007767" watchObservedRunningTime="2025-12-04 15:40:34.83269115 +0000 UTC m=+2285.718734801" Dec 04 15:40:39 crc kubenswrapper[4946]: I1204 15:40:39.036797 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-p6lq5" Dec 04 15:40:39 crc kubenswrapper[4946]: I1204 15:40:39.038043 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-p6lq5" Dec 04 15:40:39 crc kubenswrapper[4946]: I1204 15:40:39.123668 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-p6lq5" Dec 04 15:40:39 crc kubenswrapper[4946]: I1204 15:40:39.929442 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-p6lq5" Dec 04 15:40:40 crc kubenswrapper[4946]: I1204 15:40:40.005650 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-p6lq5"] Dec 04 15:40:41 crc kubenswrapper[4946]: I1204 15:40:41.877702 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-p6lq5" podUID="b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f" containerName="registry-server" containerID="cri-o://ec3c792a437b6956d87a4fd674427069f9d3cb86f62770c0736ce10584d9b9ef" gracePeriod=2 Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.492507 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p6lq5" Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.681216 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpg4p\" (UniqueName: \"kubernetes.io/projected/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f-kube-api-access-jpg4p\") pod \"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f\" (UID: \"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f\") " Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.682002 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f-catalog-content\") pod \"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f\" (UID: \"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f\") " Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.682314 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f-utilities\") pod \"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f\" (UID: \"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f\") " Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.684163 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f-utilities" (OuterVolumeSpecName: "utilities") pod "b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f" (UID: "b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.692637 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f-kube-api-access-jpg4p" (OuterVolumeSpecName: "kube-api-access-jpg4p") pod "b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f" (UID: "b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f"). InnerVolumeSpecName "kube-api-access-jpg4p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.737584 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f" (UID: "b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.785513 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpg4p\" (UniqueName: \"kubernetes.io/projected/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f-kube-api-access-jpg4p\") on node \"crc\" DevicePath \"\"" Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.785555 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.785570 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.892157 4946 generic.go:334] "Generic (PLEG): container finished" podID="b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f" containerID="ec3c792a437b6956d87a4fd674427069f9d3cb86f62770c0736ce10584d9b9ef" exitCode=0 Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.892231 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p6lq5" event={"ID":"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f","Type":"ContainerDied","Data":"ec3c792a437b6956d87a4fd674427069f9d3cb86f62770c0736ce10584d9b9ef"} Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.892280 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p6lq5" event={"ID":"b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f","Type":"ContainerDied","Data":"d5e1e98162fdebf4c37e3ae7fa0e4c31d1e2e5077b79c1d2de7097b44defac91"} Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.892306 4946 scope.go:117] "RemoveContainer" containerID="ec3c792a437b6956d87a4fd674427069f9d3cb86f62770c0736ce10584d9b9ef" Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.892237 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-p6lq5" Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.933434 4946 scope.go:117] "RemoveContainer" containerID="e05261b70a2ccee4e3c2fd875aa0fcea4ab7a1219f7c68c6ae3fa4957987cdb4" Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.937463 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-p6lq5"] Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.951970 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-p6lq5"] Dec 04 15:40:42 crc kubenswrapper[4946]: I1204 15:40:42.974618 4946 scope.go:117] "RemoveContainer" containerID="48b026130ac644f7fe738e26b6a91593f8874ff6793fb61d83d2b11f668caa82" Dec 04 15:40:43 crc kubenswrapper[4946]: I1204 15:40:43.007559 4946 scope.go:117] "RemoveContainer" containerID="ec3c792a437b6956d87a4fd674427069f9d3cb86f62770c0736ce10584d9b9ef" Dec 04 15:40:43 crc kubenswrapper[4946]: E1204 15:40:43.008108 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec3c792a437b6956d87a4fd674427069f9d3cb86f62770c0736ce10584d9b9ef\": container with ID starting with ec3c792a437b6956d87a4fd674427069f9d3cb86f62770c0736ce10584d9b9ef not found: ID does not exist" containerID="ec3c792a437b6956d87a4fd674427069f9d3cb86f62770c0736ce10584d9b9ef" Dec 04 15:40:43 crc kubenswrapper[4946]: I1204 15:40:43.008284 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec3c792a437b6956d87a4fd674427069f9d3cb86f62770c0736ce10584d9b9ef"} err="failed to get container status \"ec3c792a437b6956d87a4fd674427069f9d3cb86f62770c0736ce10584d9b9ef\": rpc error: code = NotFound desc = could not find container \"ec3c792a437b6956d87a4fd674427069f9d3cb86f62770c0736ce10584d9b9ef\": container with ID starting with ec3c792a437b6956d87a4fd674427069f9d3cb86f62770c0736ce10584d9b9ef not found: ID does not exist" Dec 04 15:40:43 crc kubenswrapper[4946]: I1204 15:40:43.008337 4946 scope.go:117] "RemoveContainer" containerID="e05261b70a2ccee4e3c2fd875aa0fcea4ab7a1219f7c68c6ae3fa4957987cdb4" Dec 04 15:40:43 crc kubenswrapper[4946]: E1204 15:40:43.008884 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e05261b70a2ccee4e3c2fd875aa0fcea4ab7a1219f7c68c6ae3fa4957987cdb4\": container with ID starting with e05261b70a2ccee4e3c2fd875aa0fcea4ab7a1219f7c68c6ae3fa4957987cdb4 not found: ID does not exist" containerID="e05261b70a2ccee4e3c2fd875aa0fcea4ab7a1219f7c68c6ae3fa4957987cdb4" Dec 04 15:40:43 crc kubenswrapper[4946]: I1204 15:40:43.008945 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e05261b70a2ccee4e3c2fd875aa0fcea4ab7a1219f7c68c6ae3fa4957987cdb4"} err="failed to get container status \"e05261b70a2ccee4e3c2fd875aa0fcea4ab7a1219f7c68c6ae3fa4957987cdb4\": rpc error: code = NotFound desc = could not find container \"e05261b70a2ccee4e3c2fd875aa0fcea4ab7a1219f7c68c6ae3fa4957987cdb4\": container with ID starting with e05261b70a2ccee4e3c2fd875aa0fcea4ab7a1219f7c68c6ae3fa4957987cdb4 not found: ID does not exist" Dec 04 15:40:43 crc kubenswrapper[4946]: I1204 15:40:43.008997 4946 scope.go:117] "RemoveContainer" containerID="48b026130ac644f7fe738e26b6a91593f8874ff6793fb61d83d2b11f668caa82" Dec 04 15:40:43 crc kubenswrapper[4946]: E1204 15:40:43.009447 4946 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"48b026130ac644f7fe738e26b6a91593f8874ff6793fb61d83d2b11f668caa82\": container with ID starting with 48b026130ac644f7fe738e26b6a91593f8874ff6793fb61d83d2b11f668caa82 not found: ID does not exist" containerID="48b026130ac644f7fe738e26b6a91593f8874ff6793fb61d83d2b11f668caa82" Dec 04 15:40:43 crc kubenswrapper[4946]: I1204 15:40:43.009535 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48b026130ac644f7fe738e26b6a91593f8874ff6793fb61d83d2b11f668caa82"} err="failed to get container status \"48b026130ac644f7fe738e26b6a91593f8874ff6793fb61d83d2b11f668caa82\": rpc error: code = NotFound desc = could not find container \"48b026130ac644f7fe738e26b6a91593f8874ff6793fb61d83d2b11f668caa82\": container with ID starting with 48b026130ac644f7fe738e26b6a91593f8874ff6793fb61d83d2b11f668caa82 not found: ID does not exist" Dec 04 15:40:43 crc kubenswrapper[4946]: I1204 15:40:43.466578 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f" path="/var/lib/kubelet/pods/b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f/volumes" Dec 04 15:40:45 crc kubenswrapper[4946]: I1204 15:40:45.457002 4946 scope.go:117] "RemoveContainer" containerID="80317987ff1a1e6d9330e502866b5342b36119911829f193cf4011a93906e03d" Dec 04 15:40:52 crc kubenswrapper[4946]: I1204 15:40:52.479014 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:40:52 crc kubenswrapper[4946]: I1204 15:40:52.479971 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:41:05 crc kubenswrapper[4946]: I1204 15:41:05.218396 4946 generic.go:334] "Generic (PLEG): container finished" podID="f35809fc-31b6-4c6b-a652-928ed15e187e" containerID="3084902decd2fca856d93120f207caa3d229b284e4b1149e995f1270e1e88f61" exitCode=0 Dec 04 15:41:05 crc kubenswrapper[4946]: I1204 15:41:05.218493 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" event={"ID":"f35809fc-31b6-4c6b-a652-928ed15e187e","Type":"ContainerDied","Data":"3084902decd2fca856d93120f207caa3d229b284e4b1149e995f1270e1e88f61"} Dec 04 15:41:06 crc kubenswrapper[4946]: I1204 15:41:06.903289 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.030373 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-nova-combined-ca-bundle\") pod \"f35809fc-31b6-4c6b-a652-928ed15e187e\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.030428 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-telemetry-combined-ca-bundle\") pod \"f35809fc-31b6-4c6b-a652-928ed15e187e\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.030489 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdjc9\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-kube-api-access-cdjc9\") pod \"f35809fc-31b6-4c6b-a652-928ed15e187e\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.030622 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-ssh-key\") pod \"f35809fc-31b6-4c6b-a652-928ed15e187e\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.030686 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-inventory\") pod \"f35809fc-31b6-4c6b-a652-928ed15e187e\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.030716 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-repo-setup-combined-ca-bundle\") pod \"f35809fc-31b6-4c6b-a652-928ed15e187e\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.030786 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"f35809fc-31b6-4c6b-a652-928ed15e187e\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.030858 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"f35809fc-31b6-4c6b-a652-928ed15e187e\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.030906 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-bootstrap-combined-ca-bundle\") pod \"f35809fc-31b6-4c6b-a652-928ed15e187e\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.030935 4946 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-libvirt-combined-ca-bundle\") pod \"f35809fc-31b6-4c6b-a652-928ed15e187e\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.030961 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"f35809fc-31b6-4c6b-a652-928ed15e187e\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.030997 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-ovn-combined-ca-bundle\") pod \"f35809fc-31b6-4c6b-a652-928ed15e187e\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.031027 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"f35809fc-31b6-4c6b-a652-928ed15e187e\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.031144 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-neutron-metadata-combined-ca-bundle\") pod \"f35809fc-31b6-4c6b-a652-928ed15e187e\" (UID: \"f35809fc-31b6-4c6b-a652-928ed15e187e\") " Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.041039 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "f35809fc-31b6-4c6b-a652-928ed15e187e" (UID: "f35809fc-31b6-4c6b-a652-928ed15e187e"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.041167 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "f35809fc-31b6-4c6b-a652-928ed15e187e" (UID: "f35809fc-31b6-4c6b-a652-928ed15e187e"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.043203 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "f35809fc-31b6-4c6b-a652-928ed15e187e" (UID: "f35809fc-31b6-4c6b-a652-928ed15e187e"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.043833 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "f35809fc-31b6-4c6b-a652-928ed15e187e" (UID: "f35809fc-31b6-4c6b-a652-928ed15e187e"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.044451 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "f35809fc-31b6-4c6b-a652-928ed15e187e" (UID: "f35809fc-31b6-4c6b-a652-928ed15e187e"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.045584 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "f35809fc-31b6-4c6b-a652-928ed15e187e" (UID: "f35809fc-31b6-4c6b-a652-928ed15e187e"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.046979 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "f35809fc-31b6-4c6b-a652-928ed15e187e" (UID: "f35809fc-31b6-4c6b-a652-928ed15e187e"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.048223 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "f35809fc-31b6-4c6b-a652-928ed15e187e" (UID: "f35809fc-31b6-4c6b-a652-928ed15e187e"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.048293 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "f35809fc-31b6-4c6b-a652-928ed15e187e" (UID: "f35809fc-31b6-4c6b-a652-928ed15e187e"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.049432 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "f35809fc-31b6-4c6b-a652-928ed15e187e" (UID: "f35809fc-31b6-4c6b-a652-928ed15e187e"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.050597 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "f35809fc-31b6-4c6b-a652-928ed15e187e" (UID: "f35809fc-31b6-4c6b-a652-928ed15e187e"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.057339 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-kube-api-access-cdjc9" (OuterVolumeSpecName: "kube-api-access-cdjc9") pod "f35809fc-31b6-4c6b-a652-928ed15e187e" (UID: "f35809fc-31b6-4c6b-a652-928ed15e187e"). InnerVolumeSpecName "kube-api-access-cdjc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.085853 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-inventory" (OuterVolumeSpecName: "inventory") pod "f35809fc-31b6-4c6b-a652-928ed15e187e" (UID: "f35809fc-31b6-4c6b-a652-928ed15e187e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.089828 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f35809fc-31b6-4c6b-a652-928ed15e187e" (UID: "f35809fc-31b6-4c6b-a652-928ed15e187e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.136221 4946 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.136288 4946 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.136312 4946 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.136330 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdjc9\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-kube-api-access-cdjc9\") on node \"crc\" DevicePath \"\"" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.136348 4946 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.136362 4946 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.136379 4946 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.136417 4946 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.136437 4946 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.136458 4946 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.136475 4946 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.136494 4946 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.136514 4946 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35809fc-31b6-4c6b-a652-928ed15e187e-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.136531 4946 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f35809fc-31b6-4c6b-a652-928ed15e187e-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.250792 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" event={"ID":"f35809fc-31b6-4c6b-a652-928ed15e187e","Type":"ContainerDied","Data":"9e446f6ef66226e73d1336add8936bbad2e765a26a83285761a722fccb7dd5a7"} Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.250903 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-bn587" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.250931 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e446f6ef66226e73d1336add8936bbad2e765a26a83285761a722fccb7dd5a7" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.442768 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms"] Dec 04 15:41:07 crc kubenswrapper[4946]: E1204 15:41:07.443452 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f" containerName="extract-utilities" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.443489 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f" containerName="extract-utilities" Dec 04 15:41:07 crc kubenswrapper[4946]: E1204 15:41:07.443661 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f35809fc-31b6-4c6b-a652-928ed15e187e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.443694 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f35809fc-31b6-4c6b-a652-928ed15e187e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 04 15:41:07 crc kubenswrapper[4946]: E1204 15:41:07.443722 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f" containerName="extract-content" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.443736 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f" containerName="extract-content" Dec 04 15:41:07 crc kubenswrapper[4946]: E1204 15:41:07.443766 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f" containerName="registry-server" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.443779 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f" containerName="registry-server" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.444262 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8cbe3b5-2b7f-4bf8-b3c9-8daa3afda09f" 
containerName="registry-server" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.444317 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f35809fc-31b6-4c6b-a652-928ed15e187e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.449286 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.453298 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.453330 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.453518 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bhtcv" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.457879 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.466635 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.495861 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms"] Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.551347 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c761f173-f866-4098-adc7-426857a5004c-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-8b4ms\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.551593 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c761f173-f866-4098-adc7-426857a5004c-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-8b4ms\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.551715 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c761f173-f866-4098-adc7-426857a5004c-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-8b4ms\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.552033 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c761f173-f866-4098-adc7-426857a5004c-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-8b4ms\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.552372 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9d9g\" (UniqueName: 
\"kubernetes.io/projected/c761f173-f866-4098-adc7-426857a5004c-kube-api-access-j9d9g\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-8b4ms\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.654429 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c761f173-f866-4098-adc7-426857a5004c-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-8b4ms\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.654560 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9d9g\" (UniqueName: \"kubernetes.io/projected/c761f173-f866-4098-adc7-426857a5004c-kube-api-access-j9d9g\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-8b4ms\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.654615 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c761f173-f866-4098-adc7-426857a5004c-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-8b4ms\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.654674 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c761f173-f866-4098-adc7-426857a5004c-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-8b4ms\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.654716 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c761f173-f866-4098-adc7-426857a5004c-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-8b4ms\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.656916 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c761f173-f866-4098-adc7-426857a5004c-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-8b4ms\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.661471 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c761f173-f866-4098-adc7-426857a5004c-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-8b4ms\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.666403 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c761f173-f866-4098-adc7-426857a5004c-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-8b4ms\" (UID: 
\"c761f173-f866-4098-adc7-426857a5004c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.666873 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c761f173-f866-4098-adc7-426857a5004c-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-8b4ms\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.678837 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9d9g\" (UniqueName: \"kubernetes.io/projected/c761f173-f866-4098-adc7-426857a5004c-kube-api-access-j9d9g\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-8b4ms\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:41:07 crc kubenswrapper[4946]: I1204 15:41:07.803797 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:41:08 crc kubenswrapper[4946]: I1204 15:41:08.457314 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms"] Dec 04 15:41:09 crc kubenswrapper[4946]: I1204 15:41:09.276850 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" event={"ID":"c761f173-f866-4098-adc7-426857a5004c","Type":"ContainerStarted","Data":"f82cca9faa33f8a96e109cbf862d43a6ccf2f553cd8e3b15ff6bd776e98b9492"} Dec 04 15:41:09 crc kubenswrapper[4946]: I1204 15:41:09.277733 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" event={"ID":"c761f173-f866-4098-adc7-426857a5004c","Type":"ContainerStarted","Data":"d6be3b2c66b687f9e1aa8c84700985af1e11349e779bd5db216893e7b487f91a"} Dec 04 15:41:09 crc kubenswrapper[4946]: I1204 15:41:09.298889 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" podStartSLOduration=1.838907847 podStartE2EDuration="2.298862854s" podCreationTimestamp="2025-12-04 15:41:07 +0000 UTC" firstStartedPulling="2025-12-04 15:41:08.45457673 +0000 UTC m=+2319.340620371" lastFinishedPulling="2025-12-04 15:41:08.914531737 +0000 UTC m=+2319.800575378" observedRunningTime="2025-12-04 15:41:09.295648138 +0000 UTC m=+2320.181691789" watchObservedRunningTime="2025-12-04 15:41:09.298862854 +0000 UTC m=+2320.184906505" Dec 04 15:41:22 crc kubenswrapper[4946]: I1204 15:41:22.480990 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:41:22 crc kubenswrapper[4946]: I1204 15:41:22.482149 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:41:22 crc kubenswrapper[4946]: I1204 15:41:22.482246 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:41:22 crc kubenswrapper[4946]: I1204 15:41:22.484085 4946 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42"} pod="openshift-machine-config-operator/machine-config-daemon-qhv79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 15:41:22 crc kubenswrapper[4946]: I1204 15:41:22.484242 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" containerID="cri-o://cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" gracePeriod=600 Dec 04 15:41:22 crc kubenswrapper[4946]: E1204 15:41:22.629884 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:41:22 crc kubenswrapper[4946]: I1204 15:41:22.726495 4946 generic.go:334] "Generic (PLEG): container finished" podID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" exitCode=0 Dec 04 15:41:22 crc kubenswrapper[4946]: I1204 15:41:22.726547 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerDied","Data":"cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42"} Dec 04 15:41:22 crc kubenswrapper[4946]: I1204 15:41:22.726634 4946 scope.go:117] "RemoveContainer" containerID="f779b15ef6675c268d0553f67a4341f7aaa97f77eb86eee0a5fcf482005b8efc" Dec 04 15:41:22 crc kubenswrapper[4946]: I1204 15:41:22.727632 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:41:22 crc kubenswrapper[4946]: E1204 15:41:22.727969 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:41:37 crc kubenswrapper[4946]: I1204 15:41:37.454947 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:41:37 crc kubenswrapper[4946]: E1204 15:41:37.455932 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:41:51 crc kubenswrapper[4946]: I1204 
15:41:51.453448 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:41:51 crc kubenswrapper[4946]: E1204 15:41:51.454544 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:42:03 crc kubenswrapper[4946]: I1204 15:42:03.454333 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:42:03 crc kubenswrapper[4946]: E1204 15:42:03.456736 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:42:17 crc kubenswrapper[4946]: I1204 15:42:17.454037 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:42:17 crc kubenswrapper[4946]: E1204 15:42:17.455195 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:42:21 crc kubenswrapper[4946]: I1204 15:42:21.517867 4946 generic.go:334] "Generic (PLEG): container finished" podID="c761f173-f866-4098-adc7-426857a5004c" containerID="f82cca9faa33f8a96e109cbf862d43a6ccf2f553cd8e3b15ff6bd776e98b9492" exitCode=0 Dec 04 15:42:21 crc kubenswrapper[4946]: I1204 15:42:21.518152 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" event={"ID":"c761f173-f866-4098-adc7-426857a5004c","Type":"ContainerDied","Data":"f82cca9faa33f8a96e109cbf862d43a6ccf2f553cd8e3b15ff6bd776e98b9492"} Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.134142 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.211589 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c761f173-f866-4098-adc7-426857a5004c-ovncontroller-config-0\") pod \"c761f173-f866-4098-adc7-426857a5004c\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.211767 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c761f173-f866-4098-adc7-426857a5004c-inventory\") pod \"c761f173-f866-4098-adc7-426857a5004c\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.212061 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c761f173-f866-4098-adc7-426857a5004c-ssh-key\") pod \"c761f173-f866-4098-adc7-426857a5004c\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.212106 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9d9g\" (UniqueName: \"kubernetes.io/projected/c761f173-f866-4098-adc7-426857a5004c-kube-api-access-j9d9g\") pod \"c761f173-f866-4098-adc7-426857a5004c\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.212169 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c761f173-f866-4098-adc7-426857a5004c-ovn-combined-ca-bundle\") pod \"c761f173-f866-4098-adc7-426857a5004c\" (UID: \"c761f173-f866-4098-adc7-426857a5004c\") " Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.229453 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c761f173-f866-4098-adc7-426857a5004c-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "c761f173-f866-4098-adc7-426857a5004c" (UID: "c761f173-f866-4098-adc7-426857a5004c"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.242456 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c761f173-f866-4098-adc7-426857a5004c-kube-api-access-j9d9g" (OuterVolumeSpecName: "kube-api-access-j9d9g") pod "c761f173-f866-4098-adc7-426857a5004c" (UID: "c761f173-f866-4098-adc7-426857a5004c"). InnerVolumeSpecName "kube-api-access-j9d9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.266873 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c761f173-f866-4098-adc7-426857a5004c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c761f173-f866-4098-adc7-426857a5004c" (UID: "c761f173-f866-4098-adc7-426857a5004c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.308493 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c761f173-f866-4098-adc7-426857a5004c-inventory" (OuterVolumeSpecName: "inventory") pod "c761f173-f866-4098-adc7-426857a5004c" (UID: "c761f173-f866-4098-adc7-426857a5004c"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.315002 4946 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c761f173-f866-4098-adc7-426857a5004c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.315044 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9d9g\" (UniqueName: \"kubernetes.io/projected/c761f173-f866-4098-adc7-426857a5004c-kube-api-access-j9d9g\") on node \"crc\" DevicePath \"\"" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.315057 4946 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c761f173-f866-4098-adc7-426857a5004c-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.315067 4946 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c761f173-f866-4098-adc7-426857a5004c-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.355895 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c761f173-f866-4098-adc7-426857a5004c-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "c761f173-f866-4098-adc7-426857a5004c" (UID: "c761f173-f866-4098-adc7-426857a5004c"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.416553 4946 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c761f173-f866-4098-adc7-426857a5004c-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.545559 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" event={"ID":"c761f173-f866-4098-adc7-426857a5004c","Type":"ContainerDied","Data":"d6be3b2c66b687f9e1aa8c84700985af1e11349e779bd5db216893e7b487f91a"} Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.546187 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6be3b2c66b687f9e1aa8c84700985af1e11349e779bd5db216893e7b487f91a" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.545684 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-8b4ms" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.737376 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf"] Dec 04 15:42:23 crc kubenswrapper[4946]: E1204 15:42:23.738218 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c761f173-f866-4098-adc7-426857a5004c" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.738255 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c761f173-f866-4098-adc7-426857a5004c" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.738587 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="c761f173-f866-4098-adc7-426857a5004c" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.739910 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.742509 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bhtcv" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.742691 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.743413 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.743476 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.743683 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.743747 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.767172 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf"] Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.824485 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.824629 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.824664 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-fqchr\" (UniqueName: \"kubernetes.io/projected/4370c15e-59ff-447e-a825-c687fde1efe0-kube-api-access-fqchr\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.824734 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.824772 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.824947 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.927850 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.928281 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.929398 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.929545 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqchr\" (UniqueName: \"kubernetes.io/projected/4370c15e-59ff-447e-a825-c687fde1efe0-kube-api-access-fqchr\") pod 
\"neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.929703 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.929832 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.935596 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.935657 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.935845 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.935973 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.936654 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:23 crc kubenswrapper[4946]: I1204 15:42:23.948599 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-fqchr\" (UniqueName: \"kubernetes.io/projected/4370c15e-59ff-447e-a825-c687fde1efe0-kube-api-access-fqchr\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:24 crc kubenswrapper[4946]: I1204 15:42:24.064211 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:42:24 crc kubenswrapper[4946]: I1204 15:42:24.704848 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf"] Dec 04 15:42:24 crc kubenswrapper[4946]: I1204 15:42:24.736682 4946 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 15:42:25 crc kubenswrapper[4946]: I1204 15:42:25.578335 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" event={"ID":"4370c15e-59ff-447e-a825-c687fde1efe0","Type":"ContainerStarted","Data":"8edd9ff909031714fa22068449b7ebdef911c700587b751f8bfb680d77e47962"} Dec 04 15:42:26 crc kubenswrapper[4946]: I1204 15:42:26.604773 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" event={"ID":"4370c15e-59ff-447e-a825-c687fde1efe0","Type":"ContainerStarted","Data":"c046fb8333e7d90fe3f77a90189929f19d98cc6539da06e9e80f26650170b082"} Dec 04 15:42:26 crc kubenswrapper[4946]: I1204 15:42:26.635843 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" podStartSLOduration=3.141626268 podStartE2EDuration="3.63581801s" podCreationTimestamp="2025-12-04 15:42:23 +0000 UTC" firstStartedPulling="2025-12-04 15:42:24.736378161 +0000 UTC m=+2395.622421812" lastFinishedPulling="2025-12-04 15:42:25.230569913 +0000 UTC m=+2396.116613554" observedRunningTime="2025-12-04 15:42:26.623507391 +0000 UTC m=+2397.509551042" watchObservedRunningTime="2025-12-04 15:42:26.63581801 +0000 UTC m=+2397.521861661" Dec 04 15:42:28 crc kubenswrapper[4946]: I1204 15:42:28.454205 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:42:28 crc kubenswrapper[4946]: E1204 15:42:28.455225 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:42:41 crc kubenswrapper[4946]: I1204 15:42:41.453183 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:42:41 crc kubenswrapper[4946]: E1204 15:42:41.454246 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" 
podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:42:54 crc kubenswrapper[4946]: I1204 15:42:54.453840 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:42:54 crc kubenswrapper[4946]: E1204 15:42:54.455603 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:43:05 crc kubenswrapper[4946]: I1204 15:43:05.452997 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:43:05 crc kubenswrapper[4946]: E1204 15:43:05.454930 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:43:19 crc kubenswrapper[4946]: I1204 15:43:19.462390 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:43:19 crc kubenswrapper[4946]: E1204 15:43:19.463522 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:43:22 crc kubenswrapper[4946]: I1204 15:43:22.301683 4946 generic.go:334] "Generic (PLEG): container finished" podID="4370c15e-59ff-447e-a825-c687fde1efe0" containerID="c046fb8333e7d90fe3f77a90189929f19d98cc6539da06e9e80f26650170b082" exitCode=0 Dec 04 15:43:22 crc kubenswrapper[4946]: I1204 15:43:22.301785 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" event={"ID":"4370c15e-59ff-447e-a825-c687fde1efe0","Type":"ContainerDied","Data":"c046fb8333e7d90fe3f77a90189929f19d98cc6539da06e9e80f26650170b082"} Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.043107 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.067141 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-neutron-metadata-combined-ca-bundle\") pod \"4370c15e-59ff-447e-a825-c687fde1efe0\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.067229 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-inventory\") pod \"4370c15e-59ff-447e-a825-c687fde1efe0\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.067305 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-neutron-ovn-metadata-agent-neutron-config-0\") pod \"4370c15e-59ff-447e-a825-c687fde1efe0\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.067485 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqchr\" (UniqueName: \"kubernetes.io/projected/4370c15e-59ff-447e-a825-c687fde1efe0-kube-api-access-fqchr\") pod \"4370c15e-59ff-447e-a825-c687fde1efe0\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.067568 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-nova-metadata-neutron-config-0\") pod \"4370c15e-59ff-447e-a825-c687fde1efe0\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.067637 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-ssh-key\") pod \"4370c15e-59ff-447e-a825-c687fde1efe0\" (UID: \"4370c15e-59ff-447e-a825-c687fde1efe0\") " Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.082794 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "4370c15e-59ff-447e-a825-c687fde1efe0" (UID: "4370c15e-59ff-447e-a825-c687fde1efe0"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.082861 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4370c15e-59ff-447e-a825-c687fde1efe0-kube-api-access-fqchr" (OuterVolumeSpecName: "kube-api-access-fqchr") pod "4370c15e-59ff-447e-a825-c687fde1efe0" (UID: "4370c15e-59ff-447e-a825-c687fde1efe0"). InnerVolumeSpecName "kube-api-access-fqchr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.107041 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "4370c15e-59ff-447e-a825-c687fde1efe0" (UID: "4370c15e-59ff-447e-a825-c687fde1efe0"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.116707 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-inventory" (OuterVolumeSpecName: "inventory") pod "4370c15e-59ff-447e-a825-c687fde1efe0" (UID: "4370c15e-59ff-447e-a825-c687fde1efe0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.129442 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4370c15e-59ff-447e-a825-c687fde1efe0" (UID: "4370c15e-59ff-447e-a825-c687fde1efe0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.145755 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "4370c15e-59ff-447e-a825-c687fde1efe0" (UID: "4370c15e-59ff-447e-a825-c687fde1efe0"). InnerVolumeSpecName "nova-metadata-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.171417 4946 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.171469 4946 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.171490 4946 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.171506 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqchr\" (UniqueName: \"kubernetes.io/projected/4370c15e-59ff-447e-a825-c687fde1efe0-kube-api-access-fqchr\") on node \"crc\" DevicePath \"\"" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.171520 4946 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.171532 4946 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4370c15e-59ff-447e-a825-c687fde1efe0-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.332790 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" event={"ID":"4370c15e-59ff-447e-a825-c687fde1efe0","Type":"ContainerDied","Data":"8edd9ff909031714fa22068449b7ebdef911c700587b751f8bfb680d77e47962"} Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.333488 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8edd9ff909031714fa22068449b7ebdef911c700587b751f8bfb680d77e47962" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.332892 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.541555 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w"] Dec 04 15:43:24 crc kubenswrapper[4946]: E1204 15:43:24.542702 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4370c15e-59ff-447e-a825-c687fde1efe0" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.542731 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="4370c15e-59ff-447e-a825-c687fde1efe0" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.544002 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="4370c15e-59ff-447e-a825-c687fde1efe0" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.546055 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.549540 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.550449 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bhtcv" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.550623 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.550644 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.550832 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.576340 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w"] Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.582688 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.582768 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.582841 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkztl\" (UniqueName: \"kubernetes.io/projected/b0812311-5552-4d94-aa72-d7274447e1f6-kube-api-access-pkztl\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.582930 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.582988 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.684961 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.685019 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.685057 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkztl\" (UniqueName: \"kubernetes.io/projected/b0812311-5552-4d94-aa72-d7274447e1f6-kube-api-access-pkztl\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.685572 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.685797 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.690430 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.690482 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.691081 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.691223 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-libvirt-secret-0\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.709622 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkztl\" (UniqueName: \"kubernetes.io/projected/b0812311-5552-4d94-aa72-d7274447e1f6-kube-api-access-pkztl\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" Dec 04 15:43:24 crc kubenswrapper[4946]: I1204 15:43:24.894428 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" Dec 04 15:43:25 crc kubenswrapper[4946]: I1204 15:43:25.545512 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w"] Dec 04 15:43:26 crc kubenswrapper[4946]: I1204 15:43:26.366042 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" event={"ID":"b0812311-5552-4d94-aa72-d7274447e1f6","Type":"ContainerStarted","Data":"d1fb023f4a982cf74b323b36ff0cbc18d3ee567ed9044fdb2e48882a4e85867f"} Dec 04 15:43:27 crc kubenswrapper[4946]: I1204 15:43:27.381212 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" event={"ID":"b0812311-5552-4d94-aa72-d7274447e1f6","Type":"ContainerStarted","Data":"3ec0f027687904b410572adc5d1cf6c77a0deda8192ac0f1895c0fb7b9b98792"} Dec 04 15:43:27 crc kubenswrapper[4946]: I1204 15:43:27.413802 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" podStartSLOduration=2.941843965 podStartE2EDuration="3.413758191s" podCreationTimestamp="2025-12-04 15:43:24 +0000 UTC" firstStartedPulling="2025-12-04 15:43:25.552563074 +0000 UTC m=+2456.438606755" lastFinishedPulling="2025-12-04 15:43:26.0244773 +0000 UTC m=+2456.910520981" observedRunningTime="2025-12-04 15:43:27.40324802 +0000 UTC m=+2458.289291711" watchObservedRunningTime="2025-12-04 15:43:27.413758191 +0000 UTC m=+2458.299801872" Dec 04 15:43:34 crc kubenswrapper[4946]: I1204 15:43:34.454048 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:43:34 crc kubenswrapper[4946]: E1204 15:43:34.455410 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:43:49 crc kubenswrapper[4946]: I1204 15:43:49.461688 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:43:49 crc kubenswrapper[4946]: E1204 15:43:49.462942 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:44:01 crc kubenswrapper[4946]: I1204 15:44:01.453816 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:44:01 crc kubenswrapper[4946]: E1204 15:44:01.455431 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:44:13 crc kubenswrapper[4946]: I1204 15:44:13.454823 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:44:13 crc kubenswrapper[4946]: E1204 15:44:13.456848 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:44:24 crc kubenswrapper[4946]: I1204 15:44:24.453554 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:44:24 crc kubenswrapper[4946]: E1204 15:44:24.457615 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:44:35 crc kubenswrapper[4946]: I1204 15:44:35.453056 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:44:35 crc kubenswrapper[4946]: E1204 15:44:35.454095 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:44:47 crc kubenswrapper[4946]: I1204 15:44:47.456841 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:44:47 crc kubenswrapper[4946]: E1204 15:44:47.458405 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:44:58 crc kubenswrapper[4946]: I1204 15:44:58.453489 4946 
scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:44:58 crc kubenswrapper[4946]: E1204 15:44:58.454711 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:45:00 crc kubenswrapper[4946]: I1204 15:45:00.172092 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx"] Dec 04 15:45:00 crc kubenswrapper[4946]: I1204 15:45:00.174857 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx" Dec 04 15:45:00 crc kubenswrapper[4946]: I1204 15:45:00.178817 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 04 15:45:00 crc kubenswrapper[4946]: I1204 15:45:00.184991 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 04 15:45:00 crc kubenswrapper[4946]: I1204 15:45:00.199268 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx"] Dec 04 15:45:00 crc kubenswrapper[4946]: I1204 15:45:00.223957 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vj6n\" (UniqueName: \"kubernetes.io/projected/aecd51e4-b377-433e-b6bf-fbe2635139d2-kube-api-access-7vj6n\") pod \"collect-profiles-29414385-c5jsx\" (UID: \"aecd51e4-b377-433e-b6bf-fbe2635139d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx" Dec 04 15:45:00 crc kubenswrapper[4946]: I1204 15:45:00.224091 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aecd51e4-b377-433e-b6bf-fbe2635139d2-secret-volume\") pod \"collect-profiles-29414385-c5jsx\" (UID: \"aecd51e4-b377-433e-b6bf-fbe2635139d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx" Dec 04 15:45:00 crc kubenswrapper[4946]: I1204 15:45:00.224178 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aecd51e4-b377-433e-b6bf-fbe2635139d2-config-volume\") pod \"collect-profiles-29414385-c5jsx\" (UID: \"aecd51e4-b377-433e-b6bf-fbe2635139d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx" Dec 04 15:45:00 crc kubenswrapper[4946]: I1204 15:45:00.326383 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aecd51e4-b377-433e-b6bf-fbe2635139d2-secret-volume\") pod \"collect-profiles-29414385-c5jsx\" (UID: \"aecd51e4-b377-433e-b6bf-fbe2635139d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx" Dec 04 15:45:00 crc kubenswrapper[4946]: I1204 15:45:00.326498 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/aecd51e4-b377-433e-b6bf-fbe2635139d2-config-volume\") pod \"collect-profiles-29414385-c5jsx\" (UID: \"aecd51e4-b377-433e-b6bf-fbe2635139d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx" Dec 04 15:45:00 crc kubenswrapper[4946]: I1204 15:45:00.326683 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vj6n\" (UniqueName: \"kubernetes.io/projected/aecd51e4-b377-433e-b6bf-fbe2635139d2-kube-api-access-7vj6n\") pod \"collect-profiles-29414385-c5jsx\" (UID: \"aecd51e4-b377-433e-b6bf-fbe2635139d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx" Dec 04 15:45:00 crc kubenswrapper[4946]: I1204 15:45:00.328413 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aecd51e4-b377-433e-b6bf-fbe2635139d2-config-volume\") pod \"collect-profiles-29414385-c5jsx\" (UID: \"aecd51e4-b377-433e-b6bf-fbe2635139d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx" Dec 04 15:45:00 crc kubenswrapper[4946]: I1204 15:45:00.346572 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aecd51e4-b377-433e-b6bf-fbe2635139d2-secret-volume\") pod \"collect-profiles-29414385-c5jsx\" (UID: \"aecd51e4-b377-433e-b6bf-fbe2635139d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx" Dec 04 15:45:00 crc kubenswrapper[4946]: I1204 15:45:00.350373 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vj6n\" (UniqueName: \"kubernetes.io/projected/aecd51e4-b377-433e-b6bf-fbe2635139d2-kube-api-access-7vj6n\") pod \"collect-profiles-29414385-c5jsx\" (UID: \"aecd51e4-b377-433e-b6bf-fbe2635139d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx" Dec 04 15:45:00 crc kubenswrapper[4946]: I1204 15:45:00.509901 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx" Dec 04 15:45:01 crc kubenswrapper[4946]: I1204 15:45:01.092333 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx"] Dec 04 15:45:01 crc kubenswrapper[4946]: I1204 15:45:01.779041 4946 generic.go:334] "Generic (PLEG): container finished" podID="aecd51e4-b377-433e-b6bf-fbe2635139d2" containerID="a85f7e59757d9038bd1df272d2e7fbf8005800a8c3e432f03311e8615ba39a5f" exitCode=0 Dec 04 15:45:01 crc kubenswrapper[4946]: I1204 15:45:01.779147 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx" event={"ID":"aecd51e4-b377-433e-b6bf-fbe2635139d2","Type":"ContainerDied","Data":"a85f7e59757d9038bd1df272d2e7fbf8005800a8c3e432f03311e8615ba39a5f"} Dec 04 15:45:01 crc kubenswrapper[4946]: I1204 15:45:01.781217 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx" event={"ID":"aecd51e4-b377-433e-b6bf-fbe2635139d2","Type":"ContainerStarted","Data":"50258d06ae7173647756bac5d941d1330943070a3276c17732921aacf6c310bf"} Dec 04 15:45:01 crc kubenswrapper[4946]: E1204 15:45:01.983527 4946 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaecd51e4_b377_433e_b6bf_fbe2635139d2.slice/crio-conmon-a85f7e59757d9038bd1df272d2e7fbf8005800a8c3e432f03311e8615ba39a5f.scope\": RecentStats: unable to find data in memory cache]" Dec 04 15:45:03 crc kubenswrapper[4946]: I1204 15:45:03.322816 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx" Dec 04 15:45:03 crc kubenswrapper[4946]: I1204 15:45:03.424203 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aecd51e4-b377-433e-b6bf-fbe2635139d2-config-volume\") pod \"aecd51e4-b377-433e-b6bf-fbe2635139d2\" (UID: \"aecd51e4-b377-433e-b6bf-fbe2635139d2\") " Dec 04 15:45:03 crc kubenswrapper[4946]: I1204 15:45:03.424374 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vj6n\" (UniqueName: \"kubernetes.io/projected/aecd51e4-b377-433e-b6bf-fbe2635139d2-kube-api-access-7vj6n\") pod \"aecd51e4-b377-433e-b6bf-fbe2635139d2\" (UID: \"aecd51e4-b377-433e-b6bf-fbe2635139d2\") " Dec 04 15:45:03 crc kubenswrapper[4946]: I1204 15:45:03.425085 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aecd51e4-b377-433e-b6bf-fbe2635139d2-secret-volume\") pod \"aecd51e4-b377-433e-b6bf-fbe2635139d2\" (UID: \"aecd51e4-b377-433e-b6bf-fbe2635139d2\") " Dec 04 15:45:03 crc kubenswrapper[4946]: I1204 15:45:03.425273 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aecd51e4-b377-433e-b6bf-fbe2635139d2-config-volume" (OuterVolumeSpecName: "config-volume") pod "aecd51e4-b377-433e-b6bf-fbe2635139d2" (UID: "aecd51e4-b377-433e-b6bf-fbe2635139d2"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:45:03 crc kubenswrapper[4946]: I1204 15:45:03.426191 4946 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aecd51e4-b377-433e-b6bf-fbe2635139d2-config-volume\") on node \"crc\" DevicePath \"\"" Dec 04 15:45:03 crc kubenswrapper[4946]: I1204 15:45:03.433843 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aecd51e4-b377-433e-b6bf-fbe2635139d2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "aecd51e4-b377-433e-b6bf-fbe2635139d2" (UID: "aecd51e4-b377-433e-b6bf-fbe2635139d2"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:45:03 crc kubenswrapper[4946]: I1204 15:45:03.434334 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aecd51e4-b377-433e-b6bf-fbe2635139d2-kube-api-access-7vj6n" (OuterVolumeSpecName: "kube-api-access-7vj6n") pod "aecd51e4-b377-433e-b6bf-fbe2635139d2" (UID: "aecd51e4-b377-433e-b6bf-fbe2635139d2"). InnerVolumeSpecName "kube-api-access-7vj6n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:45:03 crc kubenswrapper[4946]: I1204 15:45:03.529635 4946 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aecd51e4-b377-433e-b6bf-fbe2635139d2-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 04 15:45:03 crc kubenswrapper[4946]: I1204 15:45:03.529694 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vj6n\" (UniqueName: \"kubernetes.io/projected/aecd51e4-b377-433e-b6bf-fbe2635139d2-kube-api-access-7vj6n\") on node \"crc\" DevicePath \"\"" Dec 04 15:45:03 crc kubenswrapper[4946]: I1204 15:45:03.808934 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx" event={"ID":"aecd51e4-b377-433e-b6bf-fbe2635139d2","Type":"ContainerDied","Data":"50258d06ae7173647756bac5d941d1330943070a3276c17732921aacf6c310bf"} Dec 04 15:45:03 crc kubenswrapper[4946]: I1204 15:45:03.809406 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="50258d06ae7173647756bac5d941d1330943070a3276c17732921aacf6c310bf" Dec 04 15:45:03 crc kubenswrapper[4946]: I1204 15:45:03.809061 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414385-c5jsx" Dec 04 15:45:04 crc kubenswrapper[4946]: I1204 15:45:04.446471 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb"] Dec 04 15:45:04 crc kubenswrapper[4946]: I1204 15:45:04.469793 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414340-dzqwb"] Dec 04 15:45:05 crc kubenswrapper[4946]: I1204 15:45:05.477302 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c28e21c-79cb-4fe0-b8f3-247fbce0640c" path="/var/lib/kubelet/pods/9c28e21c-79cb-4fe0-b8f3-247fbce0640c/volumes" Dec 04 15:45:12 crc kubenswrapper[4946]: I1204 15:45:12.454145 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:45:12 crc kubenswrapper[4946]: E1204 15:45:12.455197 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:45:23 crc kubenswrapper[4946]: I1204 15:45:23.454669 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:45:23 crc kubenswrapper[4946]: E1204 15:45:23.458726 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:45:35 crc kubenswrapper[4946]: I1204 15:45:35.453580 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:45:35 crc kubenswrapper[4946]: E1204 15:45:35.455203 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:45:45 crc kubenswrapper[4946]: I1204 15:45:45.708594 4946 scope.go:117] "RemoveContainer" containerID="4656d0bedaa51169d870af586db6eaea2abeb8f959abfaf3444ed036ea78c29c" Dec 04 15:45:47 crc kubenswrapper[4946]: I1204 15:45:47.454395 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:45:47 crc kubenswrapper[4946]: E1204 15:45:47.455546 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:46:02 crc kubenswrapper[4946]: I1204 15:46:02.454692 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:46:02 crc kubenswrapper[4946]: E1204 15:46:02.456451 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:46:16 crc kubenswrapper[4946]: I1204 15:46:16.454213 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:46:16 crc kubenswrapper[4946]: E1204 15:46:16.457419 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:46:29 crc kubenswrapper[4946]: I1204 15:46:29.463219 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42" Dec 04 15:46:30 crc kubenswrapper[4946]: I1204 15:46:30.077218 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"26b96320c6b30595b5c40c61c050a0e4d5683556dd53fa6b01c1bbdb9e56780e"} Dec 04 15:46:50 crc kubenswrapper[4946]: I1204 15:46:50.965545 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nwb55"] Dec 04 15:46:50 crc kubenswrapper[4946]: E1204 15:46:50.968195 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aecd51e4-b377-433e-b6bf-fbe2635139d2" containerName="collect-profiles" Dec 04 15:46:50 crc kubenswrapper[4946]: I1204 15:46:50.968214 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="aecd51e4-b377-433e-b6bf-fbe2635139d2" containerName="collect-profiles" Dec 04 15:46:50 crc kubenswrapper[4946]: I1204 15:46:50.968526 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="aecd51e4-b377-433e-b6bf-fbe2635139d2" containerName="collect-profiles" Dec 04 15:46:50 crc kubenswrapper[4946]: I1204 15:46:50.970287 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nwb55" Dec 04 15:46:50 crc kubenswrapper[4946]: I1204 15:46:50.983818 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nwb55"] Dec 04 15:46:51 crc kubenswrapper[4946]: I1204 15:46:51.020872 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6klg\" (UniqueName: \"kubernetes.io/projected/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e-kube-api-access-x6klg\") pod \"redhat-operators-nwb55\" (UID: \"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e\") " pod="openshift-marketplace/redhat-operators-nwb55" Dec 04 15:46:51 crc kubenswrapper[4946]: I1204 15:46:51.020983 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e-utilities\") pod \"redhat-operators-nwb55\" (UID: \"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e\") " pod="openshift-marketplace/redhat-operators-nwb55" Dec 04 15:46:51 crc kubenswrapper[4946]: I1204 15:46:51.021019 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e-catalog-content\") pod \"redhat-operators-nwb55\" (UID: \"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e\") " pod="openshift-marketplace/redhat-operators-nwb55" Dec 04 15:46:51 crc kubenswrapper[4946]: I1204 15:46:51.123479 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6klg\" (UniqueName: \"kubernetes.io/projected/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e-kube-api-access-x6klg\") pod \"redhat-operators-nwb55\" (UID: \"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e\") " pod="openshift-marketplace/redhat-operators-nwb55" Dec 04 15:46:51 crc kubenswrapper[4946]: I1204 15:46:51.123636 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e-utilities\") pod \"redhat-operators-nwb55\" (UID: \"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e\") " pod="openshift-marketplace/redhat-operators-nwb55" Dec 04 15:46:51 crc kubenswrapper[4946]: I1204 15:46:51.123672 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e-catalog-content\") pod \"redhat-operators-nwb55\" (UID: \"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e\") " pod="openshift-marketplace/redhat-operators-nwb55" Dec 04 15:46:51 crc kubenswrapper[4946]: I1204 15:46:51.124461 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e-utilities\") pod \"redhat-operators-nwb55\" (UID: \"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e\") " pod="openshift-marketplace/redhat-operators-nwb55" Dec 04 15:46:51 crc kubenswrapper[4946]: I1204 15:46:51.124533 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e-catalog-content\") pod \"redhat-operators-nwb55\" (UID: \"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e\") " pod="openshift-marketplace/redhat-operators-nwb55" Dec 04 15:46:51 crc kubenswrapper[4946]: I1204 15:46:51.159964 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-x6klg\" (UniqueName: \"kubernetes.io/projected/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e-kube-api-access-x6klg\") pod \"redhat-operators-nwb55\" (UID: \"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e\") " pod="openshift-marketplace/redhat-operators-nwb55" Dec 04 15:46:51 crc kubenswrapper[4946]: I1204 15:46:51.312450 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nwb55" Dec 04 15:46:51 crc kubenswrapper[4946]: I1204 15:46:51.844192 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nwb55"] Dec 04 15:46:51 crc kubenswrapper[4946]: W1204 15:46:51.858263 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode8ab4d52_47dc_4ee0_9f0c_6fa842d16c1e.slice/crio-dd75ba0c6fe2d2a8956446d4ec4cca3be5a5e958de6be325085fe6dae85238a8 WatchSource:0}: Error finding container dd75ba0c6fe2d2a8956446d4ec4cca3be5a5e958de6be325085fe6dae85238a8: Status 404 returned error can't find the container with id dd75ba0c6fe2d2a8956446d4ec4cca3be5a5e958de6be325085fe6dae85238a8 Dec 04 15:46:52 crc kubenswrapper[4946]: I1204 15:46:52.392708 4946 generic.go:334] "Generic (PLEG): container finished" podID="e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e" containerID="8988d717e912798b0eed12600afae9b5f9224078d313d45f86015988a27eb16d" exitCode=0 Dec 04 15:46:52 crc kubenswrapper[4946]: I1204 15:46:52.392816 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nwb55" event={"ID":"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e","Type":"ContainerDied","Data":"8988d717e912798b0eed12600afae9b5f9224078d313d45f86015988a27eb16d"} Dec 04 15:46:52 crc kubenswrapper[4946]: I1204 15:46:52.393170 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nwb55" event={"ID":"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e","Type":"ContainerStarted","Data":"dd75ba0c6fe2d2a8956446d4ec4cca3be5a5e958de6be325085fe6dae85238a8"} Dec 04 15:46:53 crc kubenswrapper[4946]: I1204 15:46:53.407942 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nwb55" event={"ID":"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e","Type":"ContainerStarted","Data":"a2145d2bd5e39b20a4d5c11960f6ab98eb065e19cf32707fd8b883c79ac77ff6"} Dec 04 15:46:57 crc kubenswrapper[4946]: I1204 15:46:57.487183 4946 generic.go:334] "Generic (PLEG): container finished" podID="e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e" containerID="a2145d2bd5e39b20a4d5c11960f6ab98eb065e19cf32707fd8b883c79ac77ff6" exitCode=0 Dec 04 15:46:57 crc kubenswrapper[4946]: I1204 15:46:57.489726 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nwb55" event={"ID":"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e","Type":"ContainerDied","Data":"a2145d2bd5e39b20a4d5c11960f6ab98eb065e19cf32707fd8b883c79ac77ff6"} Dec 04 15:46:58 crc kubenswrapper[4946]: I1204 15:46:58.509453 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nwb55" event={"ID":"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e","Type":"ContainerStarted","Data":"4834a1676312f1cef36c7a526bb153a1cfcb1e3f420dc4256c2b3f684c3e1090"} Dec 04 15:46:58 crc kubenswrapper[4946]: I1204 15:46:58.564333 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nwb55" podStartSLOduration=2.999057424 podStartE2EDuration="8.564309386s" 
podCreationTimestamp="2025-12-04 15:46:50 +0000 UTC" firstStartedPulling="2025-12-04 15:46:52.396452957 +0000 UTC m=+2663.282496588" lastFinishedPulling="2025-12-04 15:46:57.961704899 +0000 UTC m=+2668.847748550" observedRunningTime="2025-12-04 15:46:58.536224676 +0000 UTC m=+2669.422268367" watchObservedRunningTime="2025-12-04 15:46:58.564309386 +0000 UTC m=+2669.450353027" Dec 04 15:47:01 crc kubenswrapper[4946]: I1204 15:47:01.313183 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nwb55" Dec 04 15:47:01 crc kubenswrapper[4946]: I1204 15:47:01.313615 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nwb55" Dec 04 15:47:02 crc kubenswrapper[4946]: I1204 15:47:02.379633 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nwb55" podUID="e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e" containerName="registry-server" probeResult="failure" output=< Dec 04 15:47:02 crc kubenswrapper[4946]: timeout: failed to connect service ":50051" within 1s Dec 04 15:47:02 crc kubenswrapper[4946]: > Dec 04 15:47:11 crc kubenswrapper[4946]: I1204 15:47:11.411367 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nwb55" Dec 04 15:47:11 crc kubenswrapper[4946]: I1204 15:47:11.479854 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nwb55" Dec 04 15:47:11 crc kubenswrapper[4946]: I1204 15:47:11.672697 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nwb55"] Dec 04 15:47:12 crc kubenswrapper[4946]: I1204 15:47:12.691735 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nwb55" podUID="e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e" containerName="registry-server" containerID="cri-o://4834a1676312f1cef36c7a526bb153a1cfcb1e3f420dc4256c2b3f684c3e1090" gracePeriod=2 Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.392094 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nwb55" Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.501091 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e-utilities\") pod \"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e\" (UID: \"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e\") " Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.501253 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e-catalog-content\") pod \"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e\" (UID: \"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e\") " Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.501471 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6klg\" (UniqueName: \"kubernetes.io/projected/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e-kube-api-access-x6klg\") pod \"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e\" (UID: \"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e\") " Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.502038 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e-utilities" (OuterVolumeSpecName: "utilities") pod "e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e" (UID: "e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.503790 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.508525 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e-kube-api-access-x6klg" (OuterVolumeSpecName: "kube-api-access-x6klg") pod "e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e" (UID: "e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e"). InnerVolumeSpecName "kube-api-access-x6klg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.605596 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6klg\" (UniqueName: \"kubernetes.io/projected/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e-kube-api-access-x6klg\") on node \"crc\" DevicePath \"\"" Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.633592 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e" (UID: "e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.704737 4946 generic.go:334] "Generic (PLEG): container finished" podID="e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e" containerID="4834a1676312f1cef36c7a526bb153a1cfcb1e3f420dc4256c2b3f684c3e1090" exitCode=0 Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.704801 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nwb55" Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.704793 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nwb55" event={"ID":"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e","Type":"ContainerDied","Data":"4834a1676312f1cef36c7a526bb153a1cfcb1e3f420dc4256c2b3f684c3e1090"} Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.704907 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nwb55" event={"ID":"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e","Type":"ContainerDied","Data":"dd75ba0c6fe2d2a8956446d4ec4cca3be5a5e958de6be325085fe6dae85238a8"} Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.704944 4946 scope.go:117] "RemoveContainer" containerID="4834a1676312f1cef36c7a526bb153a1cfcb1e3f420dc4256c2b3f684c3e1090" Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.707332 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.751215 4946 scope.go:117] "RemoveContainer" containerID="a2145d2bd5e39b20a4d5c11960f6ab98eb065e19cf32707fd8b883c79ac77ff6" Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.751483 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nwb55"] Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.762223 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nwb55"] Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.794463 4946 scope.go:117] "RemoveContainer" containerID="8988d717e912798b0eed12600afae9b5f9224078d313d45f86015988a27eb16d" Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.853891 4946 scope.go:117] "RemoveContainer" containerID="4834a1676312f1cef36c7a526bb153a1cfcb1e3f420dc4256c2b3f684c3e1090" Dec 04 15:47:13 crc kubenswrapper[4946]: E1204 15:47:13.854519 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4834a1676312f1cef36c7a526bb153a1cfcb1e3f420dc4256c2b3f684c3e1090\": container with ID starting with 4834a1676312f1cef36c7a526bb153a1cfcb1e3f420dc4256c2b3f684c3e1090 not found: ID does not exist" containerID="4834a1676312f1cef36c7a526bb153a1cfcb1e3f420dc4256c2b3f684c3e1090" Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.854583 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4834a1676312f1cef36c7a526bb153a1cfcb1e3f420dc4256c2b3f684c3e1090"} err="failed to get container status \"4834a1676312f1cef36c7a526bb153a1cfcb1e3f420dc4256c2b3f684c3e1090\": rpc error: code = NotFound desc = could not find container \"4834a1676312f1cef36c7a526bb153a1cfcb1e3f420dc4256c2b3f684c3e1090\": container with ID starting with 4834a1676312f1cef36c7a526bb153a1cfcb1e3f420dc4256c2b3f684c3e1090 not found: ID does not exist" Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.854618 4946 scope.go:117] "RemoveContainer" containerID="a2145d2bd5e39b20a4d5c11960f6ab98eb065e19cf32707fd8b883c79ac77ff6" Dec 04 15:47:13 crc kubenswrapper[4946]: E1204 15:47:13.855049 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2145d2bd5e39b20a4d5c11960f6ab98eb065e19cf32707fd8b883c79ac77ff6\": container with ID 
starting with a2145d2bd5e39b20a4d5c11960f6ab98eb065e19cf32707fd8b883c79ac77ff6 not found: ID does not exist" containerID="a2145d2bd5e39b20a4d5c11960f6ab98eb065e19cf32707fd8b883c79ac77ff6" Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.855105 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2145d2bd5e39b20a4d5c11960f6ab98eb065e19cf32707fd8b883c79ac77ff6"} err="failed to get container status \"a2145d2bd5e39b20a4d5c11960f6ab98eb065e19cf32707fd8b883c79ac77ff6\": rpc error: code = NotFound desc = could not find container \"a2145d2bd5e39b20a4d5c11960f6ab98eb065e19cf32707fd8b883c79ac77ff6\": container with ID starting with a2145d2bd5e39b20a4d5c11960f6ab98eb065e19cf32707fd8b883c79ac77ff6 not found: ID does not exist" Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.855159 4946 scope.go:117] "RemoveContainer" containerID="8988d717e912798b0eed12600afae9b5f9224078d313d45f86015988a27eb16d" Dec 04 15:47:13 crc kubenswrapper[4946]: E1204 15:47:13.855513 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8988d717e912798b0eed12600afae9b5f9224078d313d45f86015988a27eb16d\": container with ID starting with 8988d717e912798b0eed12600afae9b5f9224078d313d45f86015988a27eb16d not found: ID does not exist" containerID="8988d717e912798b0eed12600afae9b5f9224078d313d45f86015988a27eb16d" Dec 04 15:47:13 crc kubenswrapper[4946]: I1204 15:47:13.855540 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8988d717e912798b0eed12600afae9b5f9224078d313d45f86015988a27eb16d"} err="failed to get container status \"8988d717e912798b0eed12600afae9b5f9224078d313d45f86015988a27eb16d\": rpc error: code = NotFound desc = could not find container \"8988d717e912798b0eed12600afae9b5f9224078d313d45f86015988a27eb16d\": container with ID starting with 8988d717e912798b0eed12600afae9b5f9224078d313d45f86015988a27eb16d not found: ID does not exist" Dec 04 15:47:15 crc kubenswrapper[4946]: I1204 15:47:15.470196 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e" path="/var/lib/kubelet/pods/e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e/volumes" Dec 04 15:48:01 crc kubenswrapper[4946]: I1204 15:48:01.441885 4946 generic.go:334] "Generic (PLEG): container finished" podID="b0812311-5552-4d94-aa72-d7274447e1f6" containerID="3ec0f027687904b410572adc5d1cf6c77a0deda8192ac0f1895c0fb7b9b98792" exitCode=0 Dec 04 15:48:01 crc kubenswrapper[4946]: I1204 15:48:01.442021 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" event={"ID":"b0812311-5552-4d94-aa72-d7274447e1f6","Type":"ContainerDied","Data":"3ec0f027687904b410572adc5d1cf6c77a0deda8192ac0f1895c0fb7b9b98792"} Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.018407 4946 util.go:48] "No ready sandbox for pod can be found. 
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.146498 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-libvirt-secret-0\") pod \"b0812311-5552-4d94-aa72-d7274447e1f6\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") "
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.146632 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkztl\" (UniqueName: \"kubernetes.io/projected/b0812311-5552-4d94-aa72-d7274447e1f6-kube-api-access-pkztl\") pod \"b0812311-5552-4d94-aa72-d7274447e1f6\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") "
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.146755 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-ssh-key\") pod \"b0812311-5552-4d94-aa72-d7274447e1f6\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") "
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.146783 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-inventory\") pod \"b0812311-5552-4d94-aa72-d7274447e1f6\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") "
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.146831 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-libvirt-combined-ca-bundle\") pod \"b0812311-5552-4d94-aa72-d7274447e1f6\" (UID: \"b0812311-5552-4d94-aa72-d7274447e1f6\") "
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.154578 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "b0812311-5552-4d94-aa72-d7274447e1f6" (UID: "b0812311-5552-4d94-aa72-d7274447e1f6"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.154810 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0812311-5552-4d94-aa72-d7274447e1f6-kube-api-access-pkztl" (OuterVolumeSpecName: "kube-api-access-pkztl") pod "b0812311-5552-4d94-aa72-d7274447e1f6" (UID: "b0812311-5552-4d94-aa72-d7274447e1f6"). InnerVolumeSpecName "kube-api-access-pkztl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.181651 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-inventory" (OuterVolumeSpecName: "inventory") pod "b0812311-5552-4d94-aa72-d7274447e1f6" (UID: "b0812311-5552-4d94-aa72-d7274447e1f6"). InnerVolumeSpecName "inventory".
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.194880 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "b0812311-5552-4d94-aa72-d7274447e1f6" (UID: "b0812311-5552-4d94-aa72-d7274447e1f6"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.199841 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b0812311-5552-4d94-aa72-d7274447e1f6" (UID: "b0812311-5552-4d94-aa72-d7274447e1f6"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.250412 4946 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.250631 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkztl\" (UniqueName: \"kubernetes.io/projected/b0812311-5552-4d94-aa72-d7274447e1f6-kube-api-access-pkztl\") on node \"crc\" DevicePath \"\"" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.250738 4946 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.250802 4946 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.250867 4946 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0812311-5552-4d94-aa72-d7274447e1f6-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.475794 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.483779 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w" event={"ID":"b0812311-5552-4d94-aa72-d7274447e1f6","Type":"ContainerDied","Data":"d1fb023f4a982cf74b323b36ff0cbc18d3ee567ed9044fdb2e48882a4e85867f"}
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.484093 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d1fb023f4a982cf74b323b36ff0cbc18d3ee567ed9044fdb2e48882a4e85867f"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.625449 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5"]
Dec 04 15:48:03 crc kubenswrapper[4946]: E1204 15:48:03.627102 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0812311-5552-4d94-aa72-d7274447e1f6" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.627356 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0812311-5552-4d94-aa72-d7274447e1f6" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Dec 04 15:48:03 crc kubenswrapper[4946]: E1204 15:48:03.627580 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e" containerName="registry-server"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.627743 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e" containerName="registry-server"
Dec 04 15:48:03 crc kubenswrapper[4946]: E1204 15:48:03.628012 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e" containerName="extract-content"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.628136 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e" containerName="extract-content"
Dec 04 15:48:03 crc kubenswrapper[4946]: E1204 15:48:03.628248 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e" containerName="extract-utilities"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.628340 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e" containerName="extract-utilities"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.628783 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e" containerName="registry-server"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.628928 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0812311-5552-4d94-aa72-d7274447e1f6" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.630277 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5"
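
The paired cpu_manager/state_mem and memory_manager lines above show admission-time garbage collection: before the new nova-edpm pod is admitted, the resource managers drop per-container assignments left by pods that no longer exist (the E-level "RemoveStaleState" lines are informational despite their severity). A toy model of that pass, with a plain map standing in for kubelet's checkpointed state store:

```go
// Toy model of the RemoveStaleState pass above: drop per-container resource
// assignments for pods that are no longer active. The real kubelet keeps
// these in a checkpointed state store; a plain map stands in for it here.
package main

import "fmt"

type key struct{ podUID, container string }

type assignments map[key]string // value: a CPUSet description, for example

func (a assignments) removeStaleState(activePods map[string]bool) {
	for k := range a {
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
				k.podUID, k.container)
			delete(a, k) // deleting during range is safe in Go
		}
	}
}

func main() {
	a := assignments{
		{"b0812311-5552-4d94-aa72-d7274447e1f6", "libvirt-edpm-deployment-openstack-edpm-ipam"}: "cpus 0-3",
		{"e8ab4d52-47dc-4ee0-9f0c-6fa842d16c1e", "registry-server"}:                             "cpus 0-3",
	}
	a.removeStaleState(map[string]bool{}) // neither pod is active any more: both purged
	fmt.Println("remaining assignments:", len(a))
}
```
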
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.633951 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.634183 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.634406 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.634754 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.635066 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bhtcv"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.635442 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.635848 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.644485 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5"]
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.764910 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5z4zw\" (UniqueName: \"kubernetes.io/projected/16f11a61-301b-45bc-9ef4-675b164d4ace-kube-api-access-5z4zw\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.765723 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.765862 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.765946 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5"
Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.766101 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: 
\"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.766317 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.766871 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.767088 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.767677 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.871688 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.871770 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.871865 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.871890 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5z4zw\" (UniqueName: 
\"kubernetes.io/projected/16f11a61-301b-45bc-9ef4-675b164d4ace-kube-api-access-5z4zw\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.871958 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.871999 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.872020 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.872129 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.872152 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.876686 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.877166 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.879135 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-migration-ssh-key-1\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.879044 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.878439 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.880859 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.881421 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.893248 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.907692 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5z4zw\" (UniqueName: \"kubernetes.io/projected/16f11a61-301b-45bc-9ef4-675b164d4ace-kube-api-access-5z4zw\") pod \"nova-edpm-deployment-openstack-edpm-ipam-xh4n5\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:48:03 crc kubenswrapper[4946]: I1204 15:48:03.958728 4946 util.go:30] "No sandbox for pod can be found. 
Dec 04 15:48:04 crc kubenswrapper[4946]: I1204 15:48:04.672025 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5"]
Dec 04 15:48:04 crc kubenswrapper[4946]: I1204 15:48:04.684889 4946 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 04 15:48:05 crc kubenswrapper[4946]: I1204 15:48:05.515615 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" event={"ID":"16f11a61-301b-45bc-9ef4-675b164d4ace","Type":"ContainerStarted","Data":"8ad6f04f5d2c8ec8196f587567265d14430908a8915c6b1d71ccb81997d46fc1"}
Dec 04 15:48:06 crc kubenswrapper[4946]: I1204 15:48:06.541737 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" event={"ID":"16f11a61-301b-45bc-9ef4-675b164d4ace","Type":"ContainerStarted","Data":"a3d8f999a572b6d252d81728dcaeec3f906d2f02226adf79b297568c85e25e9b"}
Dec 04 15:48:06 crc kubenswrapper[4946]: I1204 15:48:06.594459 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" podStartSLOduration=3.056140499 podStartE2EDuration="3.594429497s" podCreationTimestamp="2025-12-04 15:48:03 +0000 UTC" firstStartedPulling="2025-12-04 15:48:04.684324767 +0000 UTC m=+2735.570368438" lastFinishedPulling="2025-12-04 15:48:05.222613805 +0000 UTC m=+2736.108657436" observedRunningTime="2025-12-04 15:48:06.584574297 +0000 UTC m=+2737.470617938" watchObservedRunningTime="2025-12-04 15:48:06.594429497 +0000 UTC m=+2737.480473148"
Dec 04 15:48:52 crc kubenswrapper[4946]: I1204 15:48:52.478383 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 15:48:52 crc kubenswrapper[4946]: I1204 15:48:52.479290 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 15:49:22 crc kubenswrapper[4946]: I1204 15:49:22.759688 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 15:49:22 crc kubenswrapper[4946]: I1204 15:49:22.760533 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 15:49:52 crc kubenswrapper[4946]: I1204 15:49:52.479806 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" start-of-body=
Dec 04 15:49:52 crc kubenswrapper[4946]: I1204 15:49:52.480892 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 15:49:52 crc kubenswrapper[4946]: I1204 15:49:52.480977 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-qhv79"
Dec 04 15:49:52 crc kubenswrapper[4946]: I1204 15:49:52.482318 4946 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"26b96320c6b30595b5c40c61c050a0e4d5683556dd53fa6b01c1bbdb9e56780e"} pod="openshift-machine-config-operator/machine-config-daemon-qhv79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 04 15:49:52 crc kubenswrapper[4946]: I1204 15:49:52.482400 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" containerID="cri-o://26b96320c6b30595b5c40c61c050a0e4d5683556dd53fa6b01c1bbdb9e56780e" gracePeriod=600
Dec 04 15:49:53 crc kubenswrapper[4946]: I1204 15:49:53.283231 4946 generic.go:334] "Generic (PLEG): container finished" podID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerID="26b96320c6b30595b5c40c61c050a0e4d5683556dd53fa6b01c1bbdb9e56780e" exitCode=0
Dec 04 15:49:53 crc kubenswrapper[4946]: I1204 15:49:53.283754 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerDied","Data":"26b96320c6b30595b5c40c61c050a0e4d5683556dd53fa6b01c1bbdb9e56780e"}
Dec 04 15:49:53 crc kubenswrapper[4946]: I1204 15:49:53.284092 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb"}
Dec 04 15:49:53 crc kubenswrapper[4946]: I1204 15:49:53.284143 4946 scope.go:117] "RemoveContainer" containerID="cfe4ee508a8a01a143abf0c051685741b51403da0e9b463a3c2c29831fbb8a42"
Dec 04 15:50:11 crc kubenswrapper[4946]: I1204 15:50:11.718291 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vjxp9"]
Dec 04 15:50:11 crc kubenswrapper[4946]: I1204 15:50:11.722909 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vjxp9"
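
The machine-config-daemon sequence above is the liveness machinery end to end: the HTTP probe to 127.0.0.1:8798/health fails with connection refused at 15:48:52, 15:49:22 and 15:49:52, and on the third logged failure kubelet declares the container unhealthy and kills it with gracePeriod=600 so it can be restarted. A simplified prober loop; the endpoint comes from the log, while the threshold of 3 (the Kubernetes default failureThreshold) and the one-second period are stand-ins for whatever the pod spec actually defines:

```go
// Simplified liveness-probe loop for the failures above.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func probeOnce(url string) error {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. "dial tcp 127.0.0.1:8798: connect: connection refused"
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 400 {
		return fmt.Errorf("status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	const url = "http://127.0.0.1:8798/health"
	const failureThreshold = 3 // Kubernetes default; assumed here
	failures := 0
	for failures < failureThreshold {
		if err := probeOnce(url); err != nil {
			failures++
			fmt.Printf("Probe failed: Get %q: %v\n", url, err)
		} else {
			failures = 0 // any success resets the count
		}
		time.Sleep(time.Second) // the real period comes from the pod spec
	}
	// This is the point where kubelet kills the container with its
	// grace period (600s in the log) and lets the sync loop restart it.
	fmt.Println("Container failed liveness probe, will be restarted")
}
```
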
Dec 04 15:50:11 crc kubenswrapper[4946]: I1204 15:50:11.746638 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vjxp9"]
Dec 04 15:50:11 crc kubenswrapper[4946]: I1204 15:50:11.849902 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4005adc-f747-4313-b950-179fb31c55c9-utilities\") pod \"certified-operators-vjxp9\" (UID: \"d4005adc-f747-4313-b950-179fb31c55c9\") " pod="openshift-marketplace/certified-operators-vjxp9"
Dec 04 15:50:11 crc kubenswrapper[4946]: I1204 15:50:11.849986 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfx49\" (UniqueName: \"kubernetes.io/projected/d4005adc-f747-4313-b950-179fb31c55c9-kube-api-access-cfx49\") pod \"certified-operators-vjxp9\" (UID: \"d4005adc-f747-4313-b950-179fb31c55c9\") " pod="openshift-marketplace/certified-operators-vjxp9"
Dec 04 15:50:11 crc kubenswrapper[4946]: I1204 15:50:11.850650 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4005adc-f747-4313-b950-179fb31c55c9-catalog-content\") pod \"certified-operators-vjxp9\" (UID: \"d4005adc-f747-4313-b950-179fb31c55c9\") " pod="openshift-marketplace/certified-operators-vjxp9"
Dec 04 15:50:11 crc kubenswrapper[4946]: I1204 15:50:11.952981 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4005adc-f747-4313-b950-179fb31c55c9-utilities\") pod \"certified-operators-vjxp9\" (UID: \"d4005adc-f747-4313-b950-179fb31c55c9\") " pod="openshift-marketplace/certified-operators-vjxp9"
Dec 04 15:50:11 crc kubenswrapper[4946]: I1204 15:50:11.953051 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfx49\" (UniqueName: \"kubernetes.io/projected/d4005adc-f747-4313-b950-179fb31c55c9-kube-api-access-cfx49\") pod \"certified-operators-vjxp9\" (UID: \"d4005adc-f747-4313-b950-179fb31c55c9\") " pod="openshift-marketplace/certified-operators-vjxp9"
Dec 04 15:50:11 crc kubenswrapper[4946]: I1204 15:50:11.953211 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4005adc-f747-4313-b950-179fb31c55c9-catalog-content\") pod \"certified-operators-vjxp9\" (UID: \"d4005adc-f747-4313-b950-179fb31c55c9\") " pod="openshift-marketplace/certified-operators-vjxp9"
Dec 04 15:50:11 crc kubenswrapper[4946]: I1204 15:50:11.953766 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4005adc-f747-4313-b950-179fb31c55c9-catalog-content\") pod \"certified-operators-vjxp9\" (UID: \"d4005adc-f747-4313-b950-179fb31c55c9\") " pod="openshift-marketplace/certified-operators-vjxp9"
Dec 04 15:50:11 crc kubenswrapper[4946]: I1204 15:50:11.953757 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4005adc-f747-4313-b950-179fb31c55c9-utilities\") pod \"certified-operators-vjxp9\" (UID: \"d4005adc-f747-4313-b950-179fb31c55c9\") " pod="openshift-marketplace/certified-operators-vjxp9"
Dec 04 15:50:11 crc kubenswrapper[4946]: I1204 15:50:11.983087 4946 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cfx49\" (UniqueName: \"kubernetes.io/projected/d4005adc-f747-4313-b950-179fb31c55c9-kube-api-access-cfx49\") pod \"certified-operators-vjxp9\" (UID: \"d4005adc-f747-4313-b950-179fb31c55c9\") " pod="openshift-marketplace/certified-operators-vjxp9" Dec 04 15:50:12 crc kubenswrapper[4946]: I1204 15:50:12.062319 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vjxp9" Dec 04 15:50:12 crc kubenswrapper[4946]: I1204 15:50:12.608791 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vjxp9"] Dec 04 15:50:12 crc kubenswrapper[4946]: W1204 15:50:12.615084 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd4005adc_f747_4313_b950_179fb31c55c9.slice/crio-92c6b9d470ea7e7116b246b51df8d2cee4acde81cf3b037ee3e783755a6be557 WatchSource:0}: Error finding container 92c6b9d470ea7e7116b246b51df8d2cee4acde81cf3b037ee3e783755a6be557: Status 404 returned error can't find the container with id 92c6b9d470ea7e7116b246b51df8d2cee4acde81cf3b037ee3e783755a6be557 Dec 04 15:50:13 crc kubenswrapper[4946]: I1204 15:50:13.571265 4946 generic.go:334] "Generic (PLEG): container finished" podID="d4005adc-f747-4313-b950-179fb31c55c9" containerID="b15db05222b5f77a2542404d39a00ccf3e02debb06ea9fc285f565186e1cf25b" exitCode=0 Dec 04 15:50:13 crc kubenswrapper[4946]: I1204 15:50:13.571395 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vjxp9" event={"ID":"d4005adc-f747-4313-b950-179fb31c55c9","Type":"ContainerDied","Data":"b15db05222b5f77a2542404d39a00ccf3e02debb06ea9fc285f565186e1cf25b"} Dec 04 15:50:13 crc kubenswrapper[4946]: I1204 15:50:13.571765 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vjxp9" event={"ID":"d4005adc-f747-4313-b950-179fb31c55c9","Type":"ContainerStarted","Data":"92c6b9d470ea7e7116b246b51df8d2cee4acde81cf3b037ee3e783755a6be557"} Dec 04 15:50:16 crc kubenswrapper[4946]: I1204 15:50:16.618640 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vjxp9" event={"ID":"d4005adc-f747-4313-b950-179fb31c55c9","Type":"ContainerStarted","Data":"55a25d2a13c4ca08dc11a960473de0bb2fdd40aa6ac38ed247af3ebd5141ccd7"} Dec 04 15:50:17 crc kubenswrapper[4946]: I1204 15:50:17.632278 4946 generic.go:334] "Generic (PLEG): container finished" podID="d4005adc-f747-4313-b950-179fb31c55c9" containerID="55a25d2a13c4ca08dc11a960473de0bb2fdd40aa6ac38ed247af3ebd5141ccd7" exitCode=0 Dec 04 15:50:17 crc kubenswrapper[4946]: I1204 15:50:17.632352 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vjxp9" event={"ID":"d4005adc-f747-4313-b950-179fb31c55c9","Type":"ContainerDied","Data":"55a25d2a13c4ca08dc11a960473de0bb2fdd40aa6ac38ed247af3ebd5141ccd7"} Dec 04 15:50:19 crc kubenswrapper[4946]: I1204 15:50:19.665323 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vjxp9" event={"ID":"d4005adc-f747-4313-b950-179fb31c55c9","Type":"ContainerStarted","Data":"d68012340c9e773b94ddc5018ec3c2830ba8e3ecc74eb4a5859e4200b2f75ec8"} Dec 04 15:50:19 crc kubenswrapper[4946]: I1204 15:50:19.704850 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vjxp9" 
Dec 04 15:50:22 crc kubenswrapper[4946]: I1204 15:50:22.063390 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vjxp9"
Dec 04 15:50:22 crc kubenswrapper[4946]: I1204 15:50:22.063799 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vjxp9"
Dec 04 15:50:22 crc kubenswrapper[4946]: I1204 15:50:22.120927 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vjxp9"
Dec 04 15:50:32 crc kubenswrapper[4946]: I1204 15:50:32.138979 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vjxp9"
Dec 04 15:50:32 crc kubenswrapper[4946]: I1204 15:50:32.214003 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vjxp9"]
Dec 04 15:50:32 crc kubenswrapper[4946]: I1204 15:50:32.846962 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vjxp9" podUID="d4005adc-f747-4313-b950-179fb31c55c9" containerName="registry-server" containerID="cri-o://d68012340c9e773b94ddc5018ec3c2830ba8e3ecc74eb4a5859e4200b2f75ec8" gracePeriod=2
Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.405605 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vjxp9"
Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.493887 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfx49\" (UniqueName: \"kubernetes.io/projected/d4005adc-f747-4313-b950-179fb31c55c9-kube-api-access-cfx49\") pod \"d4005adc-f747-4313-b950-179fb31c55c9\" (UID: \"d4005adc-f747-4313-b950-179fb31c55c9\") "
Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.510570 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4005adc-f747-4313-b950-179fb31c55c9-kube-api-access-cfx49" (OuterVolumeSpecName: "kube-api-access-cfx49") pod "d4005adc-f747-4313-b950-179fb31c55c9" (UID: "d4005adc-f747-4313-b950-179fb31c55c9"). InnerVolumeSpecName "kube-api-access-cfx49". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.597840 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4005adc-f747-4313-b950-179fb31c55c9-utilities\") pod \"d4005adc-f747-4313-b950-179fb31c55c9\" (UID: \"d4005adc-f747-4313-b950-179fb31c55c9\") "
Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.598190 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4005adc-f747-4313-b950-179fb31c55c9-utilities" (OuterVolumeSpecName: "utilities") pod "d4005adc-f747-4313-b950-179fb31c55c9" (UID: "d4005adc-f747-4313-b950-179fb31c55c9"). InnerVolumeSpecName "utilities".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.598370 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4005adc-f747-4313-b950-179fb31c55c9-catalog-content\") pod \"d4005adc-f747-4313-b950-179fb31c55c9\" (UID: \"d4005adc-f747-4313-b950-179fb31c55c9\") " Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.599406 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4005adc-f747-4313-b950-179fb31c55c9-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.599433 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfx49\" (UniqueName: \"kubernetes.io/projected/d4005adc-f747-4313-b950-179fb31c55c9-kube-api-access-cfx49\") on node \"crc\" DevicePath \"\"" Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.649804 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4005adc-f747-4313-b950-179fb31c55c9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d4005adc-f747-4313-b950-179fb31c55c9" (UID: "d4005adc-f747-4313-b950-179fb31c55c9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.702894 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4005adc-f747-4313-b950-179fb31c55c9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.870104 4946 generic.go:334] "Generic (PLEG): container finished" podID="d4005adc-f747-4313-b950-179fb31c55c9" containerID="d68012340c9e773b94ddc5018ec3c2830ba8e3ecc74eb4a5859e4200b2f75ec8" exitCode=0 Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.870218 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vjxp9" event={"ID":"d4005adc-f747-4313-b950-179fb31c55c9","Type":"ContainerDied","Data":"d68012340c9e773b94ddc5018ec3c2830ba8e3ecc74eb4a5859e4200b2f75ec8"} Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.870323 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vjxp9" event={"ID":"d4005adc-f747-4313-b950-179fb31c55c9","Type":"ContainerDied","Data":"92c6b9d470ea7e7116b246b51df8d2cee4acde81cf3b037ee3e783755a6be557"} Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.870358 4946 scope.go:117] "RemoveContainer" containerID="d68012340c9e773b94ddc5018ec3c2830ba8e3ecc74eb4a5859e4200b2f75ec8" Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.870359 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vjxp9" Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.903885 4946 scope.go:117] "RemoveContainer" containerID="55a25d2a13c4ca08dc11a960473de0bb2fdd40aa6ac38ed247af3ebd5141ccd7" Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.928178 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vjxp9"] Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.940072 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vjxp9"] Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.942899 4946 scope.go:117] "RemoveContainer" containerID="b15db05222b5f77a2542404d39a00ccf3e02debb06ea9fc285f565186e1cf25b" Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.985021 4946 scope.go:117] "RemoveContainer" containerID="d68012340c9e773b94ddc5018ec3c2830ba8e3ecc74eb4a5859e4200b2f75ec8" Dec 04 15:50:33 crc kubenswrapper[4946]: E1204 15:50:33.987934 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d68012340c9e773b94ddc5018ec3c2830ba8e3ecc74eb4a5859e4200b2f75ec8\": container with ID starting with d68012340c9e773b94ddc5018ec3c2830ba8e3ecc74eb4a5859e4200b2f75ec8 not found: ID does not exist" containerID="d68012340c9e773b94ddc5018ec3c2830ba8e3ecc74eb4a5859e4200b2f75ec8" Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.987998 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d68012340c9e773b94ddc5018ec3c2830ba8e3ecc74eb4a5859e4200b2f75ec8"} err="failed to get container status \"d68012340c9e773b94ddc5018ec3c2830ba8e3ecc74eb4a5859e4200b2f75ec8\": rpc error: code = NotFound desc = could not find container \"d68012340c9e773b94ddc5018ec3c2830ba8e3ecc74eb4a5859e4200b2f75ec8\": container with ID starting with d68012340c9e773b94ddc5018ec3c2830ba8e3ecc74eb4a5859e4200b2f75ec8 not found: ID does not exist" Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.988042 4946 scope.go:117] "RemoveContainer" containerID="55a25d2a13c4ca08dc11a960473de0bb2fdd40aa6ac38ed247af3ebd5141ccd7" Dec 04 15:50:33 crc kubenswrapper[4946]: E1204 15:50:33.989799 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55a25d2a13c4ca08dc11a960473de0bb2fdd40aa6ac38ed247af3ebd5141ccd7\": container with ID starting with 55a25d2a13c4ca08dc11a960473de0bb2fdd40aa6ac38ed247af3ebd5141ccd7 not found: ID does not exist" containerID="55a25d2a13c4ca08dc11a960473de0bb2fdd40aa6ac38ed247af3ebd5141ccd7" Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.989850 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55a25d2a13c4ca08dc11a960473de0bb2fdd40aa6ac38ed247af3ebd5141ccd7"} err="failed to get container status \"55a25d2a13c4ca08dc11a960473de0bb2fdd40aa6ac38ed247af3ebd5141ccd7\": rpc error: code = NotFound desc = could not find container \"55a25d2a13c4ca08dc11a960473de0bb2fdd40aa6ac38ed247af3ebd5141ccd7\": container with ID starting with 55a25d2a13c4ca08dc11a960473de0bb2fdd40aa6ac38ed247af3ebd5141ccd7 not found: ID does not exist" Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.989877 4946 scope.go:117] "RemoveContainer" containerID="b15db05222b5f77a2542404d39a00ccf3e02debb06ea9fc285f565186e1cf25b" Dec 04 15:50:33 crc kubenswrapper[4946]: E1204 15:50:33.992881 4946 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b15db05222b5f77a2542404d39a00ccf3e02debb06ea9fc285f565186e1cf25b\": container with ID starting with b15db05222b5f77a2542404d39a00ccf3e02debb06ea9fc285f565186e1cf25b not found: ID does not exist" containerID="b15db05222b5f77a2542404d39a00ccf3e02debb06ea9fc285f565186e1cf25b" Dec 04 15:50:33 crc kubenswrapper[4946]: I1204 15:50:33.992945 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b15db05222b5f77a2542404d39a00ccf3e02debb06ea9fc285f565186e1cf25b"} err="failed to get container status \"b15db05222b5f77a2542404d39a00ccf3e02debb06ea9fc285f565186e1cf25b\": rpc error: code = NotFound desc = could not find container \"b15db05222b5f77a2542404d39a00ccf3e02debb06ea9fc285f565186e1cf25b\": container with ID starting with b15db05222b5f77a2542404d39a00ccf3e02debb06ea9fc285f565186e1cf25b not found: ID does not exist" Dec 04 15:50:35 crc kubenswrapper[4946]: I1204 15:50:35.469713 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4005adc-f747-4313-b950-179fb31c55c9" path="/var/lib/kubelet/pods/d4005adc-f747-4313-b950-179fb31c55c9/volumes" Dec 04 15:51:04 crc kubenswrapper[4946]: I1204 15:51:04.980207 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-79jzx"] Dec 04 15:51:04 crc kubenswrapper[4946]: E1204 15:51:04.981505 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4005adc-f747-4313-b950-179fb31c55c9" containerName="extract-utilities" Dec 04 15:51:04 crc kubenswrapper[4946]: I1204 15:51:04.981522 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4005adc-f747-4313-b950-179fb31c55c9" containerName="extract-utilities" Dec 04 15:51:04 crc kubenswrapper[4946]: E1204 15:51:04.981554 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4005adc-f747-4313-b950-179fb31c55c9" containerName="extract-content" Dec 04 15:51:04 crc kubenswrapper[4946]: I1204 15:51:04.981560 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4005adc-f747-4313-b950-179fb31c55c9" containerName="extract-content" Dec 04 15:51:04 crc kubenswrapper[4946]: E1204 15:51:04.981587 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4005adc-f747-4313-b950-179fb31c55c9" containerName="registry-server" Dec 04 15:51:04 crc kubenswrapper[4946]: I1204 15:51:04.981593 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4005adc-f747-4313-b950-179fb31c55c9" containerName="registry-server" Dec 04 15:51:04 crc kubenswrapper[4946]: I1204 15:51:04.981885 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4005adc-f747-4313-b950-179fb31c55c9" containerName="registry-server" Dec 04 15:51:04 crc kubenswrapper[4946]: I1204 15:51:04.984005 4946 util.go:30] "No sandbox for pod can be found. 
Dec 04 15:51:04 crc kubenswrapper[4946]: I1204 15:51:04.995857 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-79jzx"]
Dec 04 15:51:05 crc kubenswrapper[4946]: I1204 15:51:05.127230 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/013f501c-470a-4140-8341-c26979e6af23-catalog-content\") pod \"community-operators-79jzx\" (UID: \"013f501c-470a-4140-8341-c26979e6af23\") " pod="openshift-marketplace/community-operators-79jzx"
Dec 04 15:51:05 crc kubenswrapper[4946]: I1204 15:51:05.127280 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/013f501c-470a-4140-8341-c26979e6af23-utilities\") pod \"community-operators-79jzx\" (UID: \"013f501c-470a-4140-8341-c26979e6af23\") " pod="openshift-marketplace/community-operators-79jzx"
Dec 04 15:51:05 crc kubenswrapper[4946]: I1204 15:51:05.127927 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhqvd\" (UniqueName: \"kubernetes.io/projected/013f501c-470a-4140-8341-c26979e6af23-kube-api-access-jhqvd\") pod \"community-operators-79jzx\" (UID: \"013f501c-470a-4140-8341-c26979e6af23\") " pod="openshift-marketplace/community-operators-79jzx"
Dec 04 15:51:05 crc kubenswrapper[4946]: I1204 15:51:05.230675 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhqvd\" (UniqueName: \"kubernetes.io/projected/013f501c-470a-4140-8341-c26979e6af23-kube-api-access-jhqvd\") pod \"community-operators-79jzx\" (UID: \"013f501c-470a-4140-8341-c26979e6af23\") " pod="openshift-marketplace/community-operators-79jzx"
Dec 04 15:51:05 crc kubenswrapper[4946]: I1204 15:51:05.230818 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/013f501c-470a-4140-8341-c26979e6af23-catalog-content\") pod \"community-operators-79jzx\" (UID: \"013f501c-470a-4140-8341-c26979e6af23\") " pod="openshift-marketplace/community-operators-79jzx"
Dec 04 15:51:05 crc kubenswrapper[4946]: I1204 15:51:05.230848 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/013f501c-470a-4140-8341-c26979e6af23-utilities\") pod \"community-operators-79jzx\" (UID: \"013f501c-470a-4140-8341-c26979e6af23\") " pod="openshift-marketplace/community-operators-79jzx"
Dec 04 15:51:05 crc kubenswrapper[4946]: I1204 15:51:05.231522 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/013f501c-470a-4140-8341-c26979e6af23-utilities\") pod \"community-operators-79jzx\" (UID: \"013f501c-470a-4140-8341-c26979e6af23\") " pod="openshift-marketplace/community-operators-79jzx"
Dec 04 15:51:05 crc kubenswrapper[4946]: I1204 15:51:05.231522 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/013f501c-470a-4140-8341-c26979e6af23-catalog-content\") pod \"community-operators-79jzx\" (UID: \"013f501c-470a-4140-8341-c26979e6af23\") " pod="openshift-marketplace/community-operators-79jzx"
Dec 04 15:51:05 crc kubenswrapper[4946]: I1204 15:51:05.251353 4946 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jhqvd\" (UniqueName: \"kubernetes.io/projected/013f501c-470a-4140-8341-c26979e6af23-kube-api-access-jhqvd\") pod \"community-operators-79jzx\" (UID: \"013f501c-470a-4140-8341-c26979e6af23\") " pod="openshift-marketplace/community-operators-79jzx" Dec 04 15:51:05 crc kubenswrapper[4946]: I1204 15:51:05.288124 4946 generic.go:334] "Generic (PLEG): container finished" podID="16f11a61-301b-45bc-9ef4-675b164d4ace" containerID="a3d8f999a572b6d252d81728dcaeec3f906d2f02226adf79b297568c85e25e9b" exitCode=0 Dec 04 15:51:05 crc kubenswrapper[4946]: I1204 15:51:05.288193 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" event={"ID":"16f11a61-301b-45bc-9ef4-675b164d4ace","Type":"ContainerDied","Data":"a3d8f999a572b6d252d81728dcaeec3f906d2f02226adf79b297568c85e25e9b"} Dec 04 15:51:05 crc kubenswrapper[4946]: I1204 15:51:05.356570 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-79jzx" Dec 04 15:51:05 crc kubenswrapper[4946]: I1204 15:51:05.959957 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-79jzx"] Dec 04 15:51:06 crc kubenswrapper[4946]: I1204 15:51:06.301448 4946 generic.go:334] "Generic (PLEG): container finished" podID="013f501c-470a-4140-8341-c26979e6af23" containerID="4c3762d3bbbf135e025fde135fd3f14b5fbbc3087349bbc2009b85b577c013d9" exitCode=0 Dec 04 15:51:06 crc kubenswrapper[4946]: I1204 15:51:06.301514 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-79jzx" event={"ID":"013f501c-470a-4140-8341-c26979e6af23","Type":"ContainerDied","Data":"4c3762d3bbbf135e025fde135fd3f14b5fbbc3087349bbc2009b85b577c013d9"} Dec 04 15:51:06 crc kubenswrapper[4946]: I1204 15:51:06.301854 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-79jzx" event={"ID":"013f501c-470a-4140-8341-c26979e6af23","Type":"ContainerStarted","Data":"25a0cb439edefdfa680a34a193412deb5886b327295c763d66d667d738b9b092"} Dec 04 15:51:06 crc kubenswrapper[4946]: I1204 15:51:06.854207 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:51:06 crc kubenswrapper[4946]: I1204 15:51:06.983400 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-inventory\") pod \"16f11a61-301b-45bc-9ef4-675b164d4ace\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " Dec 04 15:51:06 crc kubenswrapper[4946]: I1204 15:51:06.983476 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-migration-ssh-key-1\") pod \"16f11a61-301b-45bc-9ef4-675b164d4ace\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " Dec 04 15:51:06 crc kubenswrapper[4946]: I1204 15:51:06.983540 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-combined-ca-bundle\") pod \"16f11a61-301b-45bc-9ef4-675b164d4ace\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " Dec 04 15:51:06 crc kubenswrapper[4946]: I1204 15:51:06.983590 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-cell1-compute-config-0\") pod \"16f11a61-301b-45bc-9ef4-675b164d4ace\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " Dec 04 15:51:06 crc kubenswrapper[4946]: I1204 15:51:06.983808 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-migration-ssh-key-0\") pod \"16f11a61-301b-45bc-9ef4-675b164d4ace\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " Dec 04 15:51:06 crc kubenswrapper[4946]: I1204 15:51:06.983895 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5z4zw\" (UniqueName: \"kubernetes.io/projected/16f11a61-301b-45bc-9ef4-675b164d4ace-kube-api-access-5z4zw\") pod \"16f11a61-301b-45bc-9ef4-675b164d4ace\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " Dec 04 15:51:06 crc kubenswrapper[4946]: I1204 15:51:06.983920 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-cell1-compute-config-1\") pod \"16f11a61-301b-45bc-9ef4-675b164d4ace\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " Dec 04 15:51:06 crc kubenswrapper[4946]: I1204 15:51:06.984041 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-extra-config-0\") pod \"16f11a61-301b-45bc-9ef4-675b164d4ace\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " Dec 04 15:51:06 crc kubenswrapper[4946]: I1204 15:51:06.984144 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-ssh-key\") pod \"16f11a61-301b-45bc-9ef4-675b164d4ace\" (UID: \"16f11a61-301b-45bc-9ef4-675b164d4ace\") " Dec 04 15:51:06 crc kubenswrapper[4946]: I1204 15:51:06.992069 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "16f11a61-301b-45bc-9ef4-675b164d4ace" (UID: "16f11a61-301b-45bc-9ef4-675b164d4ace"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:51:06 crc kubenswrapper[4946]: I1204 15:51:06.993135 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16f11a61-301b-45bc-9ef4-675b164d4ace-kube-api-access-5z4zw" (OuterVolumeSpecName: "kube-api-access-5z4zw") pod "16f11a61-301b-45bc-9ef4-675b164d4ace" (UID: "16f11a61-301b-45bc-9ef4-675b164d4ace"). InnerVolumeSpecName "kube-api-access-5z4zw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.034380 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "16f11a61-301b-45bc-9ef4-675b164d4ace" (UID: "16f11a61-301b-45bc-9ef4-675b164d4ace"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.038778 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "16f11a61-301b-45bc-9ef4-675b164d4ace" (UID: "16f11a61-301b-45bc-9ef4-675b164d4ace"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.044985 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "16f11a61-301b-45bc-9ef4-675b164d4ace" (UID: "16f11a61-301b-45bc-9ef4-675b164d4ace"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.046506 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-inventory" (OuterVolumeSpecName: "inventory") pod "16f11a61-301b-45bc-9ef4-675b164d4ace" (UID: "16f11a61-301b-45bc-9ef4-675b164d4ace"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.053252 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "16f11a61-301b-45bc-9ef4-675b164d4ace" (UID: "16f11a61-301b-45bc-9ef4-675b164d4ace"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.059332 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "16f11a61-301b-45bc-9ef4-675b164d4ace" (UID: "16f11a61-301b-45bc-9ef4-675b164d4ace"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.081525 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "16f11a61-301b-45bc-9ef4-675b164d4ace" (UID: "16f11a61-301b-45bc-9ef4-675b164d4ace"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.087850 4946 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.087883 4946 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.087901 4946 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.087915 4946 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.087927 4946 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.087939 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5z4zw\" (UniqueName: \"kubernetes.io/projected/16f11a61-301b-45bc-9ef4-675b164d4ace-kube-api-access-5z4zw\") on node \"crc\" DevicePath \"\"" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.087951 4946 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.087963 4946 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/16f11a61-301b-45bc-9ef4-675b164d4ace-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.087973 4946 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16f11a61-301b-45bc-9ef4-675b164d4ace-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.313302 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" event={"ID":"16f11a61-301b-45bc-9ef4-675b164d4ace","Type":"ContainerDied","Data":"8ad6f04f5d2c8ec8196f587567265d14430908a8915c6b1d71ccb81997d46fc1"} Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.313715 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ad6f04f5d2c8ec8196f587567265d14430908a8915c6b1d71ccb81997d46fc1" Dec 04 15:51:07 crc 
kubenswrapper[4946]: I1204 15:51:07.313597 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-xh4n5" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.479716 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj"] Dec 04 15:51:07 crc kubenswrapper[4946]: E1204 15:51:07.480302 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16f11a61-301b-45bc-9ef4-675b164d4ace" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.480333 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="16f11a61-301b-45bc-9ef4-675b164d4ace" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.480537 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="16f11a61-301b-45bc-9ef4-675b164d4ace" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.486235 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.490516 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.490869 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.491031 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-bhtcv" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.492330 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.492495 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.513872 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj"] Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.611762 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.611884 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.611945 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnxkz\" (UniqueName: \"kubernetes.io/projected/c3a03510-ccc5-4bce-9a72-0e943fd6423d-kube-api-access-dnxkz\") 
pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.611995 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.612034 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.612216 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.612498 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.715061 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.715170 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.715212 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnxkz\" (UniqueName: \"kubernetes.io/projected/c3a03510-ccc5-4bce-9a72-0e943fd6423d-kube-api-access-dnxkz\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.715252 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.715289 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.715326 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.715380 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.735330 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.735344 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.736356 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.736537 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.739292 4946 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.741924 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.744036 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnxkz\" (UniqueName: \"kubernetes.io/projected/c3a03510-ccc5-4bce-9a72-0e943fd6423d-kube-api-access-dnxkz\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:07 crc kubenswrapper[4946]: I1204 15:51:07.811040 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:51:08 crc kubenswrapper[4946]: I1204 15:51:08.326972 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-79jzx" event={"ID":"013f501c-470a-4140-8341-c26979e6af23","Type":"ContainerStarted","Data":"16eaa8ee56cd83a2278f72bfc5e4ee2580bb87bd8bb40a4bf3ba5a3395e3fd6d"} Dec 04 15:51:08 crc kubenswrapper[4946]: I1204 15:51:08.471774 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj"] Dec 04 15:51:08 crc kubenswrapper[4946]: W1204 15:51:08.486831 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3a03510_ccc5_4bce_9a72_0e943fd6423d.slice/crio-40b9b94ead413502be873d300d43aa7b9d3b56b00931f239f144f82e5d7450f8 WatchSource:0}: Error finding container 40b9b94ead413502be873d300d43aa7b9d3b56b00931f239f144f82e5d7450f8: Status 404 returned error can't find the container with id 40b9b94ead413502be873d300d43aa7b9d3b56b00931f239f144f82e5d7450f8 Dec 04 15:51:09 crc kubenswrapper[4946]: I1204 15:51:09.341592 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" event={"ID":"c3a03510-ccc5-4bce-9a72-0e943fd6423d","Type":"ContainerStarted","Data":"40b9b94ead413502be873d300d43aa7b9d3b56b00931f239f144f82e5d7450f8"} Dec 04 15:51:09 crc kubenswrapper[4946]: I1204 15:51:09.345138 4946 generic.go:334] "Generic (PLEG): container finished" podID="013f501c-470a-4140-8341-c26979e6af23" containerID="16eaa8ee56cd83a2278f72bfc5e4ee2580bb87bd8bb40a4bf3ba5a3395e3fd6d" exitCode=0 Dec 04 15:51:09 crc kubenswrapper[4946]: I1204 15:51:09.345174 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-79jzx" event={"ID":"013f501c-470a-4140-8341-c26979e6af23","Type":"ContainerDied","Data":"16eaa8ee56cd83a2278f72bfc5e4ee2580bb87bd8bb40a4bf3ba5a3395e3fd6d"} Dec 04 15:51:10 crc kubenswrapper[4946]: I1204 15:51:10.365302 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-79jzx" event={"ID":"013f501c-470a-4140-8341-c26979e6af23","Type":"ContainerStarted","Data":"335f52b3ebf5b6d9982e73995975f4e3f2c98ee34bb53395025d6ad13b8f6b64"} Dec 04 15:51:10 crc kubenswrapper[4946]: I1204 15:51:10.370096 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" event={"ID":"c3a03510-ccc5-4bce-9a72-0e943fd6423d","Type":"ContainerStarted","Data":"f4124d429c4b7fffc06905b78ffe9af495c96079164c21c99e75e73f8d91d076"} Dec 04 15:51:10 crc kubenswrapper[4946]: I1204 15:51:10.400818 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-79jzx" podStartSLOduration=2.698569703 podStartE2EDuration="6.400796207s" podCreationTimestamp="2025-12-04 15:51:04 +0000 UTC" firstStartedPulling="2025-12-04 15:51:06.306448996 +0000 UTC m=+2917.192492627" lastFinishedPulling="2025-12-04 15:51:10.00867548 +0000 UTC m=+2920.894719131" observedRunningTime="2025-12-04 15:51:10.398176571 +0000 UTC m=+2921.284220222" watchObservedRunningTime="2025-12-04 15:51:10.400796207 +0000 UTC m=+2921.286839858" Dec 04 15:51:10 crc kubenswrapper[4946]: I1204 15:51:10.423737 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" podStartSLOduration=2.827904704 podStartE2EDuration="3.423715079s" podCreationTimestamp="2025-12-04 15:51:07 +0000 UTC" firstStartedPulling="2025-12-04 15:51:08.490776021 +0000 UTC m=+2919.376819662" lastFinishedPulling="2025-12-04 15:51:09.086586386 +0000 UTC m=+2919.972630037" observedRunningTime="2025-12-04 15:51:10.420074887 +0000 UTC m=+2921.306118528" watchObservedRunningTime="2025-12-04 15:51:10.423715079 +0000 UTC m=+2921.309758730" Dec 04 15:51:15 crc kubenswrapper[4946]: I1204 15:51:15.356624 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-79jzx" Dec 04 15:51:15 crc kubenswrapper[4946]: I1204 15:51:15.357363 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-79jzx" Dec 04 15:51:15 crc kubenswrapper[4946]: I1204 15:51:15.436956 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-79jzx" Dec 04 15:51:15 crc kubenswrapper[4946]: I1204 15:51:15.505138 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-79jzx" Dec 04 15:51:15 crc kubenswrapper[4946]: I1204 15:51:15.695096 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-79jzx"] Dec 04 15:51:17 crc kubenswrapper[4946]: I1204 15:51:17.499871 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-79jzx" podUID="013f501c-470a-4140-8341-c26979e6af23" containerName="registry-server" containerID="cri-o://335f52b3ebf5b6d9982e73995975f4e3f2c98ee34bb53395025d6ad13b8f6b64" gracePeriod=2 Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.082412 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-79jzx" Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.220392 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/013f501c-470a-4140-8341-c26979e6af23-catalog-content\") pod \"013f501c-470a-4140-8341-c26979e6af23\" (UID: \"013f501c-470a-4140-8341-c26979e6af23\") " Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.220557 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhqvd\" (UniqueName: \"kubernetes.io/projected/013f501c-470a-4140-8341-c26979e6af23-kube-api-access-jhqvd\") pod \"013f501c-470a-4140-8341-c26979e6af23\" (UID: \"013f501c-470a-4140-8341-c26979e6af23\") " Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.220664 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/013f501c-470a-4140-8341-c26979e6af23-utilities\") pod \"013f501c-470a-4140-8341-c26979e6af23\" (UID: \"013f501c-470a-4140-8341-c26979e6af23\") " Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.221983 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/013f501c-470a-4140-8341-c26979e6af23-utilities" (OuterVolumeSpecName: "utilities") pod "013f501c-470a-4140-8341-c26979e6af23" (UID: "013f501c-470a-4140-8341-c26979e6af23"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.228507 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/013f501c-470a-4140-8341-c26979e6af23-kube-api-access-jhqvd" (OuterVolumeSpecName: "kube-api-access-jhqvd") pod "013f501c-470a-4140-8341-c26979e6af23" (UID: "013f501c-470a-4140-8341-c26979e6af23"). InnerVolumeSpecName "kube-api-access-jhqvd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.288721 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/013f501c-470a-4140-8341-c26979e6af23-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "013f501c-470a-4140-8341-c26979e6af23" (UID: "013f501c-470a-4140-8341-c26979e6af23"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.324466 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/013f501c-470a-4140-8341-c26979e6af23-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.324514 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhqvd\" (UniqueName: \"kubernetes.io/projected/013f501c-470a-4140-8341-c26979e6af23-kube-api-access-jhqvd\") on node \"crc\" DevicePath \"\"" Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.324532 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/013f501c-470a-4140-8341-c26979e6af23-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.516488 4946 generic.go:334] "Generic (PLEG): container finished" podID="013f501c-470a-4140-8341-c26979e6af23" containerID="335f52b3ebf5b6d9982e73995975f4e3f2c98ee34bb53395025d6ad13b8f6b64" exitCode=0 Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.516581 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-79jzx" Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.516617 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-79jzx" event={"ID":"013f501c-470a-4140-8341-c26979e6af23","Type":"ContainerDied","Data":"335f52b3ebf5b6d9982e73995975f4e3f2c98ee34bb53395025d6ad13b8f6b64"} Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.517341 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-79jzx" event={"ID":"013f501c-470a-4140-8341-c26979e6af23","Type":"ContainerDied","Data":"25a0cb439edefdfa680a34a193412deb5886b327295c763d66d667d738b9b092"} Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.517368 4946 scope.go:117] "RemoveContainer" containerID="335f52b3ebf5b6d9982e73995975f4e3f2c98ee34bb53395025d6ad13b8f6b64" Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.557969 4946 scope.go:117] "RemoveContainer" containerID="16eaa8ee56cd83a2278f72bfc5e4ee2580bb87bd8bb40a4bf3ba5a3395e3fd6d" Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.577301 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-79jzx"] Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.595132 4946 scope.go:117] "RemoveContainer" containerID="4c3762d3bbbf135e025fde135fd3f14b5fbbc3087349bbc2009b85b577c013d9" Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.631838 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-79jzx"] Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.649289 4946 scope.go:117] "RemoveContainer" containerID="335f52b3ebf5b6d9982e73995975f4e3f2c98ee34bb53395025d6ad13b8f6b64" Dec 04 15:51:18 crc kubenswrapper[4946]: E1204 15:51:18.649793 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"335f52b3ebf5b6d9982e73995975f4e3f2c98ee34bb53395025d6ad13b8f6b64\": container with ID starting with 335f52b3ebf5b6d9982e73995975f4e3f2c98ee34bb53395025d6ad13b8f6b64 not found: ID does not exist" containerID="335f52b3ebf5b6d9982e73995975f4e3f2c98ee34bb53395025d6ad13b8f6b64" Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.649858 
4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"335f52b3ebf5b6d9982e73995975f4e3f2c98ee34bb53395025d6ad13b8f6b64"} err="failed to get container status \"335f52b3ebf5b6d9982e73995975f4e3f2c98ee34bb53395025d6ad13b8f6b64\": rpc error: code = NotFound desc = could not find container \"335f52b3ebf5b6d9982e73995975f4e3f2c98ee34bb53395025d6ad13b8f6b64\": container with ID starting with 335f52b3ebf5b6d9982e73995975f4e3f2c98ee34bb53395025d6ad13b8f6b64 not found: ID does not exist" Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.649893 4946 scope.go:117] "RemoveContainer" containerID="16eaa8ee56cd83a2278f72bfc5e4ee2580bb87bd8bb40a4bf3ba5a3395e3fd6d" Dec 04 15:51:18 crc kubenswrapper[4946]: E1204 15:51:18.650619 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16eaa8ee56cd83a2278f72bfc5e4ee2580bb87bd8bb40a4bf3ba5a3395e3fd6d\": container with ID starting with 16eaa8ee56cd83a2278f72bfc5e4ee2580bb87bd8bb40a4bf3ba5a3395e3fd6d not found: ID does not exist" containerID="16eaa8ee56cd83a2278f72bfc5e4ee2580bb87bd8bb40a4bf3ba5a3395e3fd6d" Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.650655 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16eaa8ee56cd83a2278f72bfc5e4ee2580bb87bd8bb40a4bf3ba5a3395e3fd6d"} err="failed to get container status \"16eaa8ee56cd83a2278f72bfc5e4ee2580bb87bd8bb40a4bf3ba5a3395e3fd6d\": rpc error: code = NotFound desc = could not find container \"16eaa8ee56cd83a2278f72bfc5e4ee2580bb87bd8bb40a4bf3ba5a3395e3fd6d\": container with ID starting with 16eaa8ee56cd83a2278f72bfc5e4ee2580bb87bd8bb40a4bf3ba5a3395e3fd6d not found: ID does not exist" Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.650672 4946 scope.go:117] "RemoveContainer" containerID="4c3762d3bbbf135e025fde135fd3f14b5fbbc3087349bbc2009b85b577c013d9" Dec 04 15:51:18 crc kubenswrapper[4946]: E1204 15:51:18.650990 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c3762d3bbbf135e025fde135fd3f14b5fbbc3087349bbc2009b85b577c013d9\": container with ID starting with 4c3762d3bbbf135e025fde135fd3f14b5fbbc3087349bbc2009b85b577c013d9 not found: ID does not exist" containerID="4c3762d3bbbf135e025fde135fd3f14b5fbbc3087349bbc2009b85b577c013d9" Dec 04 15:51:18 crc kubenswrapper[4946]: I1204 15:51:18.651017 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c3762d3bbbf135e025fde135fd3f14b5fbbc3087349bbc2009b85b577c013d9"} err="failed to get container status \"4c3762d3bbbf135e025fde135fd3f14b5fbbc3087349bbc2009b85b577c013d9\": rpc error: code = NotFound desc = could not find container \"4c3762d3bbbf135e025fde135fd3f14b5fbbc3087349bbc2009b85b577c013d9\": container with ID starting with 4c3762d3bbbf135e025fde135fd3f14b5fbbc3087349bbc2009b85b577c013d9 not found: ID does not exist" Dec 04 15:51:19 crc kubenswrapper[4946]: I1204 15:51:19.467735 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="013f501c-470a-4140-8341-c26979e6af23" path="/var/lib/kubelet/pods/013f501c-470a-4140-8341-c26979e6af23/volumes" Dec 04 15:51:52 crc kubenswrapper[4946]: I1204 15:51:52.491750 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:51:52 crc kubenswrapper[4946]: I1204 15:51:52.492740 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:52:22 crc kubenswrapper[4946]: I1204 15:52:22.479205 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:52:22 crc kubenswrapper[4946]: I1204 15:52:22.480110 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:52:52 crc kubenswrapper[4946]: I1204 15:52:52.478935 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 15:52:52 crc kubenswrapper[4946]: I1204 15:52:52.479843 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 15:52:52 crc kubenswrapper[4946]: I1204 15:52:52.479903 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 15:52:52 crc kubenswrapper[4946]: I1204 15:52:52.481314 4946 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb"} pod="openshift-machine-config-operator/machine-config-daemon-qhv79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 15:52:52 crc kubenswrapper[4946]: I1204 15:52:52.481386 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" containerID="cri-o://aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" gracePeriod=600 Dec 04 15:52:52 crc kubenswrapper[4946]: E1204 15:52:52.613759 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:52:52 crc kubenswrapper[4946]: I1204 15:52:52.671712 4946 
generic.go:334] "Generic (PLEG): container finished" podID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" exitCode=0 Dec 04 15:52:52 crc kubenswrapper[4946]: I1204 15:52:52.671793 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerDied","Data":"aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb"} Dec 04 15:52:52 crc kubenswrapper[4946]: I1204 15:52:52.671887 4946 scope.go:117] "RemoveContainer" containerID="26b96320c6b30595b5c40c61c050a0e4d5683556dd53fa6b01c1bbdb9e56780e" Dec 04 15:52:52 crc kubenswrapper[4946]: I1204 15:52:52.673242 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:52:52 crc kubenswrapper[4946]: E1204 15:52:52.673930 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:53:04 crc kubenswrapper[4946]: I1204 15:53:04.453527 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:53:04 crc kubenswrapper[4946]: E1204 15:53:04.454914 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:53:16 crc kubenswrapper[4946]: I1204 15:53:16.453275 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:53:16 crc kubenswrapper[4946]: E1204 15:53:16.454410 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:53:31 crc kubenswrapper[4946]: I1204 15:53:31.454454 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:53:31 crc kubenswrapper[4946]: E1204 15:53:31.455886 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:53:39 crc kubenswrapper[4946]: I1204 15:53:39.213589 4946 generic.go:334] "Generic (PLEG): container finished" 
podID="c3a03510-ccc5-4bce-9a72-0e943fd6423d" containerID="f4124d429c4b7fffc06905b78ffe9af495c96079164c21c99e75e73f8d91d076" exitCode=0 Dec 04 15:53:39 crc kubenswrapper[4946]: I1204 15:53:39.213694 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" event={"ID":"c3a03510-ccc5-4bce-9a72-0e943fd6423d","Type":"ContainerDied","Data":"f4124d429c4b7fffc06905b78ffe9af495c96079164c21c99e75e73f8d91d076"} Dec 04 15:53:40 crc kubenswrapper[4946]: I1204 15:53:40.833278 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:53:40 crc kubenswrapper[4946]: I1204 15:53:40.981914 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ssh-key\") pod \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " Dec 04 15:53:40 crc kubenswrapper[4946]: I1204 15:53:40.982580 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dnxkz\" (UniqueName: \"kubernetes.io/projected/c3a03510-ccc5-4bce-9a72-0e943fd6423d-kube-api-access-dnxkz\") pod \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " Dec 04 15:53:40 crc kubenswrapper[4946]: I1204 15:53:40.982630 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ceilometer-compute-config-data-1\") pod \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " Dec 04 15:53:40 crc kubenswrapper[4946]: I1204 15:53:40.982694 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-telemetry-combined-ca-bundle\") pod \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " Dec 04 15:53:40 crc kubenswrapper[4946]: I1204 15:53:40.983565 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ceilometer-compute-config-data-2\") pod \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " Dec 04 15:53:40 crc kubenswrapper[4946]: I1204 15:53:40.983733 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-inventory\") pod \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " Dec 04 15:53:40 crc kubenswrapper[4946]: I1204 15:53:40.983789 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ceilometer-compute-config-data-0\") pod \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\" (UID: \"c3a03510-ccc5-4bce-9a72-0e943fd6423d\") " Dec 04 15:53:40 crc kubenswrapper[4946]: I1204 15:53:40.990358 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") 
pod "c3a03510-ccc5-4bce-9a72-0e943fd6423d" (UID: "c3a03510-ccc5-4bce-9a72-0e943fd6423d"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:53:40 crc kubenswrapper[4946]: I1204 15:53:40.995398 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3a03510-ccc5-4bce-9a72-0e943fd6423d-kube-api-access-dnxkz" (OuterVolumeSpecName: "kube-api-access-dnxkz") pod "c3a03510-ccc5-4bce-9a72-0e943fd6423d" (UID: "c3a03510-ccc5-4bce-9a72-0e943fd6423d"). InnerVolumeSpecName "kube-api-access-dnxkz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:53:41 crc kubenswrapper[4946]: I1204 15:53:41.037792 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c3a03510-ccc5-4bce-9a72-0e943fd6423d" (UID: "c3a03510-ccc5-4bce-9a72-0e943fd6423d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:53:41 crc kubenswrapper[4946]: I1204 15:53:41.042463 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "c3a03510-ccc5-4bce-9a72-0e943fd6423d" (UID: "c3a03510-ccc5-4bce-9a72-0e943fd6423d"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:53:41 crc kubenswrapper[4946]: I1204 15:53:41.045563 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "c3a03510-ccc5-4bce-9a72-0e943fd6423d" (UID: "c3a03510-ccc5-4bce-9a72-0e943fd6423d"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:53:41 crc kubenswrapper[4946]: I1204 15:53:41.051198 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "c3a03510-ccc5-4bce-9a72-0e943fd6423d" (UID: "c3a03510-ccc5-4bce-9a72-0e943fd6423d"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:53:41 crc kubenswrapper[4946]: I1204 15:53:41.051257 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-inventory" (OuterVolumeSpecName: "inventory") pod "c3a03510-ccc5-4bce-9a72-0e943fd6423d" (UID: "c3a03510-ccc5-4bce-9a72-0e943fd6423d"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 15:53:41 crc kubenswrapper[4946]: I1204 15:53:41.087711 4946 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 04 15:53:41 crc kubenswrapper[4946]: I1204 15:53:41.087762 4946 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 15:53:41 crc kubenswrapper[4946]: I1204 15:53:41.087774 4946 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 04 15:53:41 crc kubenswrapper[4946]: I1204 15:53:41.087789 4946 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 15:53:41 crc kubenswrapper[4946]: I1204 15:53:41.087798 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dnxkz\" (UniqueName: \"kubernetes.io/projected/c3a03510-ccc5-4bce-9a72-0e943fd6423d-kube-api-access-dnxkz\") on node \"crc\" DevicePath \"\"" Dec 04 15:53:41 crc kubenswrapper[4946]: I1204 15:53:41.087811 4946 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 04 15:53:41 crc kubenswrapper[4946]: I1204 15:53:41.087820 4946 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3a03510-ccc5-4bce-9a72-0e943fd6423d-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 15:53:41 crc kubenswrapper[4946]: I1204 15:53:41.251728 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" event={"ID":"c3a03510-ccc5-4bce-9a72-0e943fd6423d","Type":"ContainerDied","Data":"40b9b94ead413502be873d300d43aa7b9d3b56b00931f239f144f82e5d7450f8"} Dec 04 15:53:41 crc kubenswrapper[4946]: I1204 15:53:41.252372 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="40b9b94ead413502be873d300d43aa7b9d3b56b00931f239f144f82e5d7450f8" Dec 04 15:53:41 crc kubenswrapper[4946]: I1204 15:53:41.251801 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj" Dec 04 15:53:45 crc kubenswrapper[4946]: I1204 15:53:45.453725 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:53:45 crc kubenswrapper[4946]: E1204 15:53:45.456925 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:53:56 crc kubenswrapper[4946]: I1204 15:53:56.453335 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:53:56 crc kubenswrapper[4946]: E1204 15:53:56.454759 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:54:08 crc kubenswrapper[4946]: I1204 15:54:08.453720 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:54:08 crc kubenswrapper[4946]: E1204 15:54:08.454940 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:54:20 crc kubenswrapper[4946]: I1204 15:54:20.453846 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:54:20 crc kubenswrapper[4946]: E1204 15:54:20.455813 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:54:33 crc kubenswrapper[4946]: I1204 15:54:33.453982 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:54:33 crc kubenswrapper[4946]: E1204 15:54:33.455567 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:54:48 crc kubenswrapper[4946]: I1204 15:54:48.453429 4946 scope.go:117] "RemoveContainer" 
containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:54:48 crc kubenswrapper[4946]: E1204 15:54:48.454436 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:55:00 crc kubenswrapper[4946]: I1204 15:55:00.866200 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 04 15:55:00 crc kubenswrapper[4946]: E1204 15:55:00.867644 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="013f501c-470a-4140-8341-c26979e6af23" containerName="extract-content" Dec 04 15:55:00 crc kubenswrapper[4946]: I1204 15:55:00.867665 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="013f501c-470a-4140-8341-c26979e6af23" containerName="extract-content" Dec 04 15:55:00 crc kubenswrapper[4946]: E1204 15:55:00.867694 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3a03510-ccc5-4bce-9a72-0e943fd6423d" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 04 15:55:00 crc kubenswrapper[4946]: I1204 15:55:00.867705 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3a03510-ccc5-4bce-9a72-0e943fd6423d" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 04 15:55:00 crc kubenswrapper[4946]: E1204 15:55:00.867741 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="013f501c-470a-4140-8341-c26979e6af23" containerName="extract-utilities" Dec 04 15:55:00 crc kubenswrapper[4946]: I1204 15:55:00.867750 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="013f501c-470a-4140-8341-c26979e6af23" containerName="extract-utilities" Dec 04 15:55:00 crc kubenswrapper[4946]: E1204 15:55:00.867767 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="013f501c-470a-4140-8341-c26979e6af23" containerName="registry-server" Dec 04 15:55:00 crc kubenswrapper[4946]: I1204 15:55:00.867775 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="013f501c-470a-4140-8341-c26979e6af23" containerName="registry-server" Dec 04 15:55:00 crc kubenswrapper[4946]: I1204 15:55:00.868076 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="013f501c-470a-4140-8341-c26979e6af23" containerName="registry-server" Dec 04 15:55:00 crc kubenswrapper[4946]: I1204 15:55:00.868139 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3a03510-ccc5-4bce-9a72-0e943fd6423d" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 04 15:55:00 crc kubenswrapper[4946]: I1204 15:55:00.869343 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 04 15:55:00 crc kubenswrapper[4946]: I1204 15:55:00.872433 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-7tgpz" Dec 04 15:55:00 crc kubenswrapper[4946]: I1204 15:55:00.873403 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 04 15:55:00 crc kubenswrapper[4946]: I1204 15:55:00.873492 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 04 15:55:00 crc kubenswrapper[4946]: I1204 15:55:00.875824 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 04 15:55:00 crc kubenswrapper[4946]: I1204 15:55:00.892749 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.016892 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csh6t\" (UniqueName: \"kubernetes.io/projected/3ca94ef1-1df3-4925-9335-b30db3fbffb9-kube-api-access-csh6t\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.016981 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/3ca94ef1-1df3-4925-9335-b30db3fbffb9-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.017022 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.017095 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3ca94ef1-1df3-4925-9335-b30db3fbffb9-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.017145 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3ca94ef1-1df3-4925-9335-b30db3fbffb9-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.017222 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3ca94ef1-1df3-4925-9335-b30db3fbffb9-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.017247 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: 
\"kubernetes.io/empty-dir/3ca94ef1-1df3-4925-9335-b30db3fbffb9-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.017317 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/3ca94ef1-1df3-4925-9335-b30db3fbffb9-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.017521 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3ca94ef1-1df3-4925-9335-b30db3fbffb9-config-data\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.120425 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/3ca94ef1-1df3-4925-9335-b30db3fbffb9-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.120513 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.120603 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3ca94ef1-1df3-4925-9335-b30db3fbffb9-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.120641 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3ca94ef1-1df3-4925-9335-b30db3fbffb9-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.120739 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3ca94ef1-1df3-4925-9335-b30db3fbffb9-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.120767 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/3ca94ef1-1df3-4925-9335-b30db3fbffb9-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.120853 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/3ca94ef1-1df3-4925-9335-b30db3fbffb9-ca-certs\") pod 
\"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.120897 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3ca94ef1-1df3-4925-9335-b30db3fbffb9-config-data\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.120939 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csh6t\" (UniqueName: \"kubernetes.io/projected/3ca94ef1-1df3-4925-9335-b30db3fbffb9-kube-api-access-csh6t\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.121963 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/3ca94ef1-1df3-4925-9335-b30db3fbffb9-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.122311 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/3ca94ef1-1df3-4925-9335-b30db3fbffb9-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.123173 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3ca94ef1-1df3-4925-9335-b30db3fbffb9-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.123164 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.124108 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3ca94ef1-1df3-4925-9335-b30db3fbffb9-config-data\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.131200 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/3ca94ef1-1df3-4925-9335-b30db3fbffb9-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.139984 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3ca94ef1-1df3-4925-9335-b30db3fbffb9-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc 
kubenswrapper[4946]: I1204 15:55:01.140322 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3ca94ef1-1df3-4925-9335-b30db3fbffb9-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.161455 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csh6t\" (UniqueName: \"kubernetes.io/projected/3ca94ef1-1df3-4925-9335-b30db3fbffb9-kube-api-access-csh6t\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.172542 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"tempest-tests-tempest\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.201022 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.453696 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:55:01 crc kubenswrapper[4946]: E1204 15:55:01.454379 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.786924 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 04 15:55:01 crc kubenswrapper[4946]: I1204 15:55:01.794001 4946 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 15:55:02 crc kubenswrapper[4946]: I1204 15:55:02.493049 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"3ca94ef1-1df3-4925-9335-b30db3fbffb9","Type":"ContainerStarted","Data":"09d669eb4ca917cc46ada16056dcd9920cf31c0b0d8c388d3461a2fd08b82fe2"} Dec 04 15:55:14 crc kubenswrapper[4946]: I1204 15:55:14.454361 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:55:14 crc kubenswrapper[4946]: E1204 15:55:14.455804 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:55:29 crc kubenswrapper[4946]: I1204 15:55:29.461614 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:55:29 crc kubenswrapper[4946]: E1204 15:55:29.462747 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:55:38 crc kubenswrapper[4946]: E1204 15:55:38.258273 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Dec 04 15:55:38 crc kubenswrapper[4946]: E1204 15:55:38.259138 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-csh6t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},
Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(3ca94ef1-1df3-4925-9335-b30db3fbffb9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 15:55:38 crc kubenswrapper[4946]: E1204 15:55:38.260320 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="3ca94ef1-1df3-4925-9335-b30db3fbffb9" Dec 04 15:55:39 crc kubenswrapper[4946]: E1204 15:55:39.045169 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="3ca94ef1-1df3-4925-9335-b30db3fbffb9" Dec 04 15:55:42 crc kubenswrapper[4946]: I1204 15:55:42.453226 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:55:42 crc kubenswrapper[4946]: E1204 15:55:42.454745 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:55:52 crc kubenswrapper[4946]: I1204 15:55:52.909108 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 04 15:55:53 crc kubenswrapper[4946]: I1204 15:55:53.457614 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:55:53 crc kubenswrapper[4946]: E1204 15:55:53.458875 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:55:54 crc kubenswrapper[4946]: I1204 15:55:54.223215 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"3ca94ef1-1df3-4925-9335-b30db3fbffb9","Type":"ContainerStarted","Data":"1adf249aa994d906def8c6ca2f57ae1760dd976743cff928e75c4e403b164b77"} Dec 04 15:55:54 crc kubenswrapper[4946]: I1204 15:55:54.253737 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.14206533 podStartE2EDuration="55.253708706s" podCreationTimestamp="2025-12-04 15:54:59 +0000 UTC" firstStartedPulling="2025-12-04 15:55:01.793690551 +0000 UTC m=+3152.679734202" lastFinishedPulling="2025-12-04 15:55:52.905333917 +0000 UTC m=+3203.791377578" observedRunningTime="2025-12-04 15:55:54.24170765 +0000 UTC m=+3205.127751301" watchObservedRunningTime="2025-12-04 
15:55:54.253708706 +0000 UTC m=+3205.139752357" Dec 04 15:56:05 crc kubenswrapper[4946]: I1204 15:56:05.453101 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:56:05 crc kubenswrapper[4946]: E1204 15:56:05.454532 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:56:16 crc kubenswrapper[4946]: I1204 15:56:16.453706 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:56:16 crc kubenswrapper[4946]: E1204 15:56:16.454721 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:56:29 crc kubenswrapper[4946]: I1204 15:56:29.469265 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:56:29 crc kubenswrapper[4946]: E1204 15:56:29.471017 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:56:37 crc kubenswrapper[4946]: I1204 15:56:37.399214 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-s78sr"] Dec 04 15:56:37 crc kubenswrapper[4946]: I1204 15:56:37.404671 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s78sr" Dec 04 15:56:37 crc kubenswrapper[4946]: I1204 15:56:37.420495 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s78sr"] Dec 04 15:56:37 crc kubenswrapper[4946]: I1204 15:56:37.531703 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90-utilities\") pod \"redhat-marketplace-s78sr\" (UID: \"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90\") " pod="openshift-marketplace/redhat-marketplace-s78sr" Dec 04 15:56:37 crc kubenswrapper[4946]: I1204 15:56:37.531768 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90-catalog-content\") pod \"redhat-marketplace-s78sr\" (UID: \"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90\") " pod="openshift-marketplace/redhat-marketplace-s78sr" Dec 04 15:56:37 crc kubenswrapper[4946]: I1204 15:56:37.531830 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnc65\" (UniqueName: \"kubernetes.io/projected/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90-kube-api-access-rnc65\") pod \"redhat-marketplace-s78sr\" (UID: \"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90\") " pod="openshift-marketplace/redhat-marketplace-s78sr" Dec 04 15:56:37 crc kubenswrapper[4946]: I1204 15:56:37.633620 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90-utilities\") pod \"redhat-marketplace-s78sr\" (UID: \"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90\") " pod="openshift-marketplace/redhat-marketplace-s78sr" Dec 04 15:56:37 crc kubenswrapper[4946]: I1204 15:56:37.633689 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90-catalog-content\") pod \"redhat-marketplace-s78sr\" (UID: \"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90\") " pod="openshift-marketplace/redhat-marketplace-s78sr" Dec 04 15:56:37 crc kubenswrapper[4946]: I1204 15:56:37.633734 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnc65\" (UniqueName: \"kubernetes.io/projected/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90-kube-api-access-rnc65\") pod \"redhat-marketplace-s78sr\" (UID: \"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90\") " pod="openshift-marketplace/redhat-marketplace-s78sr" Dec 04 15:56:37 crc kubenswrapper[4946]: I1204 15:56:37.634433 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90-utilities\") pod \"redhat-marketplace-s78sr\" (UID: \"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90\") " pod="openshift-marketplace/redhat-marketplace-s78sr" Dec 04 15:56:37 crc kubenswrapper[4946]: I1204 15:56:37.634518 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90-catalog-content\") pod \"redhat-marketplace-s78sr\" (UID: \"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90\") " pod="openshift-marketplace/redhat-marketplace-s78sr" Dec 04 15:56:37 crc kubenswrapper[4946]: I1204 15:56:37.657786 4946 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-rnc65\" (UniqueName: \"kubernetes.io/projected/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90-kube-api-access-rnc65\") pod \"redhat-marketplace-s78sr\" (UID: \"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90\") " pod="openshift-marketplace/redhat-marketplace-s78sr" Dec 04 15:56:37 crc kubenswrapper[4946]: I1204 15:56:37.761783 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s78sr" Dec 04 15:56:38 crc kubenswrapper[4946]: I1204 15:56:38.376453 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s78sr"] Dec 04 15:56:38 crc kubenswrapper[4946]: I1204 15:56:38.926973 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s78sr" event={"ID":"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90","Type":"ContainerStarted","Data":"3d15a32c4f91ac6082cc8642f33a8135fd320c42feeb57c9dbbf8faf60301082"} Dec 04 15:56:38 crc kubenswrapper[4946]: I1204 15:56:38.927583 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s78sr" event={"ID":"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90","Type":"ContainerStarted","Data":"d01c50ecb23fc91be83e8890e291e1a4b562c50d1675c998358675df87ce62f7"} Dec 04 15:56:39 crc kubenswrapper[4946]: I1204 15:56:39.938063 4946 generic.go:334] "Generic (PLEG): container finished" podID="ae06d4cd-cfaf-4581-a706-fdd1ca05fe90" containerID="3d15a32c4f91ac6082cc8642f33a8135fd320c42feeb57c9dbbf8faf60301082" exitCode=0 Dec 04 15:56:39 crc kubenswrapper[4946]: I1204 15:56:39.938244 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s78sr" event={"ID":"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90","Type":"ContainerDied","Data":"3d15a32c4f91ac6082cc8642f33a8135fd320c42feeb57c9dbbf8faf60301082"} Dec 04 15:56:40 crc kubenswrapper[4946]: I1204 15:56:40.951198 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s78sr" event={"ID":"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90","Type":"ContainerStarted","Data":"d798a0efa93176500ebb6d637d5fb47c45fde91da068d9fe131fbe5a68529302"} Dec 04 15:56:41 crc kubenswrapper[4946]: I1204 15:56:41.453616 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:56:41 crc kubenswrapper[4946]: E1204 15:56:41.454150 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:56:41 crc kubenswrapper[4946]: I1204 15:56:41.963380 4946 generic.go:334] "Generic (PLEG): container finished" podID="ae06d4cd-cfaf-4581-a706-fdd1ca05fe90" containerID="d798a0efa93176500ebb6d637d5fb47c45fde91da068d9fe131fbe5a68529302" exitCode=0 Dec 04 15:56:41 crc kubenswrapper[4946]: I1204 15:56:41.963786 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s78sr" event={"ID":"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90","Type":"ContainerDied","Data":"d798a0efa93176500ebb6d637d5fb47c45fde91da068d9fe131fbe5a68529302"} Dec 04 15:56:45 crc kubenswrapper[4946]: I1204 15:56:45.000027 4946 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-marketplace/redhat-marketplace-s78sr" event={"ID":"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90","Type":"ContainerStarted","Data":"f130b2c47bdb4993c1cd5136b9af29cbce35c92b36d21f4397147e480f786a25"} Dec 04 15:56:45 crc kubenswrapper[4946]: I1204 15:56:45.040040 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-s78sr" podStartSLOduration=4.170707809 podStartE2EDuration="8.040012772s" podCreationTimestamp="2025-12-04 15:56:37 +0000 UTC" firstStartedPulling="2025-12-04 15:56:39.940874885 +0000 UTC m=+3250.826918526" lastFinishedPulling="2025-12-04 15:56:43.810179818 +0000 UTC m=+3254.696223489" observedRunningTime="2025-12-04 15:56:45.029402433 +0000 UTC m=+3255.915446074" watchObservedRunningTime="2025-12-04 15:56:45.040012772 +0000 UTC m=+3255.926056413" Dec 04 15:56:47 crc kubenswrapper[4946]: I1204 15:56:47.765280 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-s78sr" Dec 04 15:56:47 crc kubenswrapper[4946]: I1204 15:56:47.765958 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-s78sr" Dec 04 15:56:47 crc kubenswrapper[4946]: I1204 15:56:47.844671 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-s78sr" Dec 04 15:56:52 crc kubenswrapper[4946]: I1204 15:56:52.453559 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:56:52 crc kubenswrapper[4946]: E1204 15:56:52.454613 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:56:57 crc kubenswrapper[4946]: I1204 15:56:57.849796 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-s78sr" Dec 04 15:56:57 crc kubenswrapper[4946]: I1204 15:56:57.922264 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-s78sr"] Dec 04 15:56:58 crc kubenswrapper[4946]: I1204 15:56:58.183641 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-s78sr" podUID="ae06d4cd-cfaf-4581-a706-fdd1ca05fe90" containerName="registry-server" containerID="cri-o://f130b2c47bdb4993c1cd5136b9af29cbce35c92b36d21f4397147e480f786a25" gracePeriod=2 Dec 04 15:56:58 crc kubenswrapper[4946]: I1204 15:56:58.861904 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s78sr" Dec 04 15:56:58 crc kubenswrapper[4946]: I1204 15:56:58.908323 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90-utilities\") pod \"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90\" (UID: \"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90\") " Dec 04 15:56:58 crc kubenswrapper[4946]: I1204 15:56:58.908652 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnc65\" (UniqueName: \"kubernetes.io/projected/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90-kube-api-access-rnc65\") pod \"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90\" (UID: \"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90\") " Dec 04 15:56:58 crc kubenswrapper[4946]: I1204 15:56:58.908704 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90-catalog-content\") pod \"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90\" (UID: \"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90\") " Dec 04 15:56:58 crc kubenswrapper[4946]: I1204 15:56:58.910948 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90-utilities" (OuterVolumeSpecName: "utilities") pod "ae06d4cd-cfaf-4581-a706-fdd1ca05fe90" (UID: "ae06d4cd-cfaf-4581-a706-fdd1ca05fe90"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:56:58 crc kubenswrapper[4946]: I1204 15:56:58.928855 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ae06d4cd-cfaf-4581-a706-fdd1ca05fe90" (UID: "ae06d4cd-cfaf-4581-a706-fdd1ca05fe90"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:56:58 crc kubenswrapper[4946]: I1204 15:56:58.938430 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90-kube-api-access-rnc65" (OuterVolumeSpecName: "kube-api-access-rnc65") pod "ae06d4cd-cfaf-4581-a706-fdd1ca05fe90" (UID: "ae06d4cd-cfaf-4581-a706-fdd1ca05fe90"). InnerVolumeSpecName "kube-api-access-rnc65". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.012229 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnc65\" (UniqueName: \"kubernetes.io/projected/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90-kube-api-access-rnc65\") on node \"crc\" DevicePath \"\"" Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.012285 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.012297 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.196891 4946 generic.go:334] "Generic (PLEG): container finished" podID="ae06d4cd-cfaf-4581-a706-fdd1ca05fe90" containerID="f130b2c47bdb4993c1cd5136b9af29cbce35c92b36d21f4397147e480f786a25" exitCode=0 Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.196948 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s78sr" event={"ID":"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90","Type":"ContainerDied","Data":"f130b2c47bdb4993c1cd5136b9af29cbce35c92b36d21f4397147e480f786a25"} Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.197003 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s78sr" event={"ID":"ae06d4cd-cfaf-4581-a706-fdd1ca05fe90","Type":"ContainerDied","Data":"d01c50ecb23fc91be83e8890e291e1a4b562c50d1675c998358675df87ce62f7"} Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.197026 4946 scope.go:117] "RemoveContainer" containerID="f130b2c47bdb4993c1cd5136b9af29cbce35c92b36d21f4397147e480f786a25" Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.197219 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s78sr" Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.229861 4946 scope.go:117] "RemoveContainer" containerID="d798a0efa93176500ebb6d637d5fb47c45fde91da068d9fe131fbe5a68529302" Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.241727 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-s78sr"] Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.258055 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-s78sr"] Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.263145 4946 scope.go:117] "RemoveContainer" containerID="3d15a32c4f91ac6082cc8642f33a8135fd320c42feeb57c9dbbf8faf60301082" Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.301204 4946 scope.go:117] "RemoveContainer" containerID="f130b2c47bdb4993c1cd5136b9af29cbce35c92b36d21f4397147e480f786a25" Dec 04 15:56:59 crc kubenswrapper[4946]: E1204 15:56:59.302059 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f130b2c47bdb4993c1cd5136b9af29cbce35c92b36d21f4397147e480f786a25\": container with ID starting with f130b2c47bdb4993c1cd5136b9af29cbce35c92b36d21f4397147e480f786a25 not found: ID does not exist" containerID="f130b2c47bdb4993c1cd5136b9af29cbce35c92b36d21f4397147e480f786a25" Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.302105 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f130b2c47bdb4993c1cd5136b9af29cbce35c92b36d21f4397147e480f786a25"} err="failed to get container status \"f130b2c47bdb4993c1cd5136b9af29cbce35c92b36d21f4397147e480f786a25\": rpc error: code = NotFound desc = could not find container \"f130b2c47bdb4993c1cd5136b9af29cbce35c92b36d21f4397147e480f786a25\": container with ID starting with f130b2c47bdb4993c1cd5136b9af29cbce35c92b36d21f4397147e480f786a25 not found: ID does not exist" Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.302262 4946 scope.go:117] "RemoveContainer" containerID="d798a0efa93176500ebb6d637d5fb47c45fde91da068d9fe131fbe5a68529302" Dec 04 15:56:59 crc kubenswrapper[4946]: E1204 15:56:59.302760 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d798a0efa93176500ebb6d637d5fb47c45fde91da068d9fe131fbe5a68529302\": container with ID starting with d798a0efa93176500ebb6d637d5fb47c45fde91da068d9fe131fbe5a68529302 not found: ID does not exist" containerID="d798a0efa93176500ebb6d637d5fb47c45fde91da068d9fe131fbe5a68529302" Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.302818 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d798a0efa93176500ebb6d637d5fb47c45fde91da068d9fe131fbe5a68529302"} err="failed to get container status \"d798a0efa93176500ebb6d637d5fb47c45fde91da068d9fe131fbe5a68529302\": rpc error: code = NotFound desc = could not find container \"d798a0efa93176500ebb6d637d5fb47c45fde91da068d9fe131fbe5a68529302\": container with ID starting with d798a0efa93176500ebb6d637d5fb47c45fde91da068d9fe131fbe5a68529302 not found: ID does not exist" Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.302865 4946 scope.go:117] "RemoveContainer" containerID="3d15a32c4f91ac6082cc8642f33a8135fd320c42feeb57c9dbbf8faf60301082" Dec 04 15:56:59 crc kubenswrapper[4946]: E1204 15:56:59.303326 4946 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"3d15a32c4f91ac6082cc8642f33a8135fd320c42feeb57c9dbbf8faf60301082\": container with ID starting with 3d15a32c4f91ac6082cc8642f33a8135fd320c42feeb57c9dbbf8faf60301082 not found: ID does not exist" containerID="3d15a32c4f91ac6082cc8642f33a8135fd320c42feeb57c9dbbf8faf60301082" Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.303366 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d15a32c4f91ac6082cc8642f33a8135fd320c42feeb57c9dbbf8faf60301082"} err="failed to get container status \"3d15a32c4f91ac6082cc8642f33a8135fd320c42feeb57c9dbbf8faf60301082\": rpc error: code = NotFound desc = could not find container \"3d15a32c4f91ac6082cc8642f33a8135fd320c42feeb57c9dbbf8faf60301082\": container with ID starting with 3d15a32c4f91ac6082cc8642f33a8135fd320c42feeb57c9dbbf8faf60301082 not found: ID does not exist" Dec 04 15:56:59 crc kubenswrapper[4946]: I1204 15:56:59.464715 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae06d4cd-cfaf-4581-a706-fdd1ca05fe90" path="/var/lib/kubelet/pods/ae06d4cd-cfaf-4581-a706-fdd1ca05fe90/volumes" Dec 04 15:57:04 crc kubenswrapper[4946]: I1204 15:57:04.452658 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:57:04 crc kubenswrapper[4946]: E1204 15:57:04.453924 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:57:16 crc kubenswrapper[4946]: I1204 15:57:16.452994 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:57:16 crc kubenswrapper[4946]: E1204 15:57:16.453920 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:57:31 crc kubenswrapper[4946]: I1204 15:57:31.454035 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:57:31 crc kubenswrapper[4946]: E1204 15:57:31.455217 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:57:45 crc kubenswrapper[4946]: I1204 15:57:45.453781 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:57:45 crc kubenswrapper[4946]: E1204 15:57:45.455622 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 15:57:52 crc kubenswrapper[4946]: I1204 15:57:52.274306 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bfp64"] Dec 04 15:57:52 crc kubenswrapper[4946]: E1204 15:57:52.276247 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae06d4cd-cfaf-4581-a706-fdd1ca05fe90" containerName="registry-server" Dec 04 15:57:52 crc kubenswrapper[4946]: I1204 15:57:52.276269 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae06d4cd-cfaf-4581-a706-fdd1ca05fe90" containerName="registry-server" Dec 04 15:57:52 crc kubenswrapper[4946]: E1204 15:57:52.276286 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae06d4cd-cfaf-4581-a706-fdd1ca05fe90" containerName="extract-utilities" Dec 04 15:57:52 crc kubenswrapper[4946]: I1204 15:57:52.276295 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae06d4cd-cfaf-4581-a706-fdd1ca05fe90" containerName="extract-utilities" Dec 04 15:57:52 crc kubenswrapper[4946]: E1204 15:57:52.276318 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae06d4cd-cfaf-4581-a706-fdd1ca05fe90" containerName="extract-content" Dec 04 15:57:52 crc kubenswrapper[4946]: I1204 15:57:52.276324 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae06d4cd-cfaf-4581-a706-fdd1ca05fe90" containerName="extract-content" Dec 04 15:57:52 crc kubenswrapper[4946]: I1204 15:57:52.276565 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae06d4cd-cfaf-4581-a706-fdd1ca05fe90" containerName="registry-server" Dec 04 15:57:52 crc kubenswrapper[4946]: I1204 15:57:52.278181 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bfp64" Dec 04 15:57:52 crc kubenswrapper[4946]: I1204 15:57:52.313276 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bfp64"] Dec 04 15:57:52 crc kubenswrapper[4946]: I1204 15:57:52.375292 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vchbv\" (UniqueName: \"kubernetes.io/projected/4e2bfdb6-af28-4c0d-8f0d-a99539c5b225-kube-api-access-vchbv\") pod \"redhat-operators-bfp64\" (UID: \"4e2bfdb6-af28-4c0d-8f0d-a99539c5b225\") " pod="openshift-marketplace/redhat-operators-bfp64" Dec 04 15:57:52 crc kubenswrapper[4946]: I1204 15:57:52.375474 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e2bfdb6-af28-4c0d-8f0d-a99539c5b225-utilities\") pod \"redhat-operators-bfp64\" (UID: \"4e2bfdb6-af28-4c0d-8f0d-a99539c5b225\") " pod="openshift-marketplace/redhat-operators-bfp64" Dec 04 15:57:52 crc kubenswrapper[4946]: I1204 15:57:52.375572 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e2bfdb6-af28-4c0d-8f0d-a99539c5b225-catalog-content\") pod \"redhat-operators-bfp64\" (UID: \"4e2bfdb6-af28-4c0d-8f0d-a99539c5b225\") " pod="openshift-marketplace/redhat-operators-bfp64" Dec 04 15:57:52 crc kubenswrapper[4946]: I1204 15:57:52.477093 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vchbv\" (UniqueName: \"kubernetes.io/projected/4e2bfdb6-af28-4c0d-8f0d-a99539c5b225-kube-api-access-vchbv\") pod \"redhat-operators-bfp64\" (UID: \"4e2bfdb6-af28-4c0d-8f0d-a99539c5b225\") " pod="openshift-marketplace/redhat-operators-bfp64" Dec 04 15:57:52 crc kubenswrapper[4946]: I1204 15:57:52.477633 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e2bfdb6-af28-4c0d-8f0d-a99539c5b225-utilities\") pod \"redhat-operators-bfp64\" (UID: \"4e2bfdb6-af28-4c0d-8f0d-a99539c5b225\") " pod="openshift-marketplace/redhat-operators-bfp64" Dec 04 15:57:52 crc kubenswrapper[4946]: I1204 15:57:52.477763 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e2bfdb6-af28-4c0d-8f0d-a99539c5b225-catalog-content\") pod \"redhat-operators-bfp64\" (UID: \"4e2bfdb6-af28-4c0d-8f0d-a99539c5b225\") " pod="openshift-marketplace/redhat-operators-bfp64" Dec 04 15:57:52 crc kubenswrapper[4946]: I1204 15:57:52.478370 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e2bfdb6-af28-4c0d-8f0d-a99539c5b225-catalog-content\") pod \"redhat-operators-bfp64\" (UID: \"4e2bfdb6-af28-4c0d-8f0d-a99539c5b225\") " pod="openshift-marketplace/redhat-operators-bfp64" Dec 04 15:57:52 crc kubenswrapper[4946]: I1204 15:57:52.478837 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e2bfdb6-af28-4c0d-8f0d-a99539c5b225-utilities\") pod \"redhat-operators-bfp64\" (UID: \"4e2bfdb6-af28-4c0d-8f0d-a99539c5b225\") " pod="openshift-marketplace/redhat-operators-bfp64" Dec 04 15:57:52 crc kubenswrapper[4946]: I1204 15:57:52.523440 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-vchbv\" (UniqueName: \"kubernetes.io/projected/4e2bfdb6-af28-4c0d-8f0d-a99539c5b225-kube-api-access-vchbv\") pod \"redhat-operators-bfp64\" (UID: \"4e2bfdb6-af28-4c0d-8f0d-a99539c5b225\") " pod="openshift-marketplace/redhat-operators-bfp64" Dec 04 15:57:52 crc kubenswrapper[4946]: I1204 15:57:52.658322 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bfp64" Dec 04 15:57:53 crc kubenswrapper[4946]: I1204 15:57:53.246996 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bfp64"] Dec 04 15:57:53 crc kubenswrapper[4946]: I1204 15:57:53.444798 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bfp64" event={"ID":"4e2bfdb6-af28-4c0d-8f0d-a99539c5b225","Type":"ContainerStarted","Data":"70e7dfe4b1a81250440870c825403b8abb0ff7eab7962030b81118edf6c371a6"} Dec 04 15:57:54 crc kubenswrapper[4946]: I1204 15:57:54.460655 4946 generic.go:334] "Generic (PLEG): container finished" podID="4e2bfdb6-af28-4c0d-8f0d-a99539c5b225" containerID="8430759a83fbe1e5f4924ee926a5fbdf8f7e81827bd0d46b97136a78bd2996d8" exitCode=0 Dec 04 15:57:54 crc kubenswrapper[4946]: I1204 15:57:54.460714 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bfp64" event={"ID":"4e2bfdb6-af28-4c0d-8f0d-a99539c5b225","Type":"ContainerDied","Data":"8430759a83fbe1e5f4924ee926a5fbdf8f7e81827bd0d46b97136a78bd2996d8"} Dec 04 15:58:00 crc kubenswrapper[4946]: I1204 15:58:00.453585 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 15:58:05 crc kubenswrapper[4946]: I1204 15:58:05.627056 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"d0a1437bd15130925a740c259ecdc4342e886fbcfecf8430fa402c5d9360e53c"} Dec 04 15:58:13 crc kubenswrapper[4946]: I1204 15:58:13.755784 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bfp64" event={"ID":"4e2bfdb6-af28-4c0d-8f0d-a99539c5b225","Type":"ContainerStarted","Data":"b701d5e4f37482efe0fe012249735feb193c05f5dc1700230fd20378194ae8b6"} Dec 04 15:58:14 crc kubenswrapper[4946]: I1204 15:58:14.773672 4946 generic.go:334] "Generic (PLEG): container finished" podID="4e2bfdb6-af28-4c0d-8f0d-a99539c5b225" containerID="b701d5e4f37482efe0fe012249735feb193c05f5dc1700230fd20378194ae8b6" exitCode=0 Dec 04 15:58:14 crc kubenswrapper[4946]: I1204 15:58:14.773729 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bfp64" event={"ID":"4e2bfdb6-af28-4c0d-8f0d-a99539c5b225","Type":"ContainerDied","Data":"b701d5e4f37482efe0fe012249735feb193c05f5dc1700230fd20378194ae8b6"} Dec 04 15:58:19 crc kubenswrapper[4946]: I1204 15:58:19.855886 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bfp64" event={"ID":"4e2bfdb6-af28-4c0d-8f0d-a99539c5b225","Type":"ContainerStarted","Data":"1e48486349cfc54bea10a7fa883d9c228a620d45e75ba0e0955ffffa9a7e5e32"} Dec 04 15:58:19 crc kubenswrapper[4946]: I1204 15:58:19.901181 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bfp64" podStartSLOduration=3.5199068000000002 podStartE2EDuration="27.901095774s" podCreationTimestamp="2025-12-04 15:57:52 
+0000 UTC" firstStartedPulling="2025-12-04 15:57:54.465265811 +0000 UTC m=+3325.351309442" lastFinishedPulling="2025-12-04 15:58:18.846454775 +0000 UTC m=+3349.732498416" observedRunningTime="2025-12-04 15:58:19.88690764 +0000 UTC m=+3350.772951291" watchObservedRunningTime="2025-12-04 15:58:19.901095774 +0000 UTC m=+3350.787139435" Dec 04 15:58:22 crc kubenswrapper[4946]: I1204 15:58:22.659330 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bfp64" Dec 04 15:58:22 crc kubenswrapper[4946]: I1204 15:58:22.660172 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bfp64" Dec 04 15:58:23 crc kubenswrapper[4946]: I1204 15:58:23.736874 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-bfp64" podUID="4e2bfdb6-af28-4c0d-8f0d-a99539c5b225" containerName="registry-server" probeResult="failure" output=< Dec 04 15:58:23 crc kubenswrapper[4946]: timeout: failed to connect service ":50051" within 1s Dec 04 15:58:23 crc kubenswrapper[4946]: > Dec 04 15:58:32 crc kubenswrapper[4946]: I1204 15:58:32.733789 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bfp64" Dec 04 15:58:32 crc kubenswrapper[4946]: I1204 15:58:32.812245 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bfp64" Dec 04 15:58:33 crc kubenswrapper[4946]: I1204 15:58:33.994569 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bfp64"] Dec 04 15:58:34 crc kubenswrapper[4946]: I1204 15:58:34.086876 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-g4h2m"] Dec 04 15:58:34 crc kubenswrapper[4946]: I1204 15:58:34.087239 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-g4h2m" podUID="6380379a-3b27-4d4a-a8b9-9b8c72dcde44" containerName="registry-server" containerID="cri-o://6f69e03a9fa32a5f877643bc50d69bc50fbe7e8f074a975c9d6459812d77a781" gracePeriod=2 Dec 04 15:58:35 crc kubenswrapper[4946]: I1204 15:58:35.091403 4946 generic.go:334] "Generic (PLEG): container finished" podID="6380379a-3b27-4d4a-a8b9-9b8c72dcde44" containerID="6f69e03a9fa32a5f877643bc50d69bc50fbe7e8f074a975c9d6459812d77a781" exitCode=0 Dec 04 15:58:35 crc kubenswrapper[4946]: I1204 15:58:35.091494 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g4h2m" event={"ID":"6380379a-3b27-4d4a-a8b9-9b8c72dcde44","Type":"ContainerDied","Data":"6f69e03a9fa32a5f877643bc50d69bc50fbe7e8f074a975c9d6459812d77a781"} Dec 04 15:58:35 crc kubenswrapper[4946]: I1204 15:58:35.315086 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-g4h2m" Dec 04 15:58:35 crc kubenswrapper[4946]: I1204 15:58:35.405526 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6380379a-3b27-4d4a-a8b9-9b8c72dcde44-catalog-content\") pod \"6380379a-3b27-4d4a-a8b9-9b8c72dcde44\" (UID: \"6380379a-3b27-4d4a-a8b9-9b8c72dcde44\") " Dec 04 15:58:35 crc kubenswrapper[4946]: I1204 15:58:35.405878 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfb7d\" (UniqueName: \"kubernetes.io/projected/6380379a-3b27-4d4a-a8b9-9b8c72dcde44-kube-api-access-mfb7d\") pod \"6380379a-3b27-4d4a-a8b9-9b8c72dcde44\" (UID: \"6380379a-3b27-4d4a-a8b9-9b8c72dcde44\") " Dec 04 15:58:35 crc kubenswrapper[4946]: I1204 15:58:35.406179 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6380379a-3b27-4d4a-a8b9-9b8c72dcde44-utilities\") pod \"6380379a-3b27-4d4a-a8b9-9b8c72dcde44\" (UID: \"6380379a-3b27-4d4a-a8b9-9b8c72dcde44\") " Dec 04 15:58:35 crc kubenswrapper[4946]: I1204 15:58:35.410400 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6380379a-3b27-4d4a-a8b9-9b8c72dcde44-utilities" (OuterVolumeSpecName: "utilities") pod "6380379a-3b27-4d4a-a8b9-9b8c72dcde44" (UID: "6380379a-3b27-4d4a-a8b9-9b8c72dcde44"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:58:35 crc kubenswrapper[4946]: I1204 15:58:35.415164 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6380379a-3b27-4d4a-a8b9-9b8c72dcde44-kube-api-access-mfb7d" (OuterVolumeSpecName: "kube-api-access-mfb7d") pod "6380379a-3b27-4d4a-a8b9-9b8c72dcde44" (UID: "6380379a-3b27-4d4a-a8b9-9b8c72dcde44"). InnerVolumeSpecName "kube-api-access-mfb7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 15:58:35 crc kubenswrapper[4946]: I1204 15:58:35.509937 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6380379a-3b27-4d4a-a8b9-9b8c72dcde44-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 15:58:35 crc kubenswrapper[4946]: I1204 15:58:35.509981 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfb7d\" (UniqueName: \"kubernetes.io/projected/6380379a-3b27-4d4a-a8b9-9b8c72dcde44-kube-api-access-mfb7d\") on node \"crc\" DevicePath \"\"" Dec 04 15:58:35 crc kubenswrapper[4946]: I1204 15:58:35.654037 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6380379a-3b27-4d4a-a8b9-9b8c72dcde44-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6380379a-3b27-4d4a-a8b9-9b8c72dcde44" (UID: "6380379a-3b27-4d4a-a8b9-9b8c72dcde44"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 15:58:35 crc kubenswrapper[4946]: I1204 15:58:35.715533 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6380379a-3b27-4d4a-a8b9-9b8c72dcde44-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 15:58:36 crc kubenswrapper[4946]: I1204 15:58:36.104171 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g4h2m" event={"ID":"6380379a-3b27-4d4a-a8b9-9b8c72dcde44","Type":"ContainerDied","Data":"e355f80a7d786e81f4b829c1983ff2233ff5b2faa97abc532c8e7199ec524912"} Dec 04 15:58:36 crc kubenswrapper[4946]: I1204 15:58:36.104311 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g4h2m" Dec 04 15:58:36 crc kubenswrapper[4946]: I1204 15:58:36.104559 4946 scope.go:117] "RemoveContainer" containerID="6f69e03a9fa32a5f877643bc50d69bc50fbe7e8f074a975c9d6459812d77a781" Dec 04 15:58:36 crc kubenswrapper[4946]: I1204 15:58:36.130406 4946 scope.go:117] "RemoveContainer" containerID="ceb68d6fd37585055dc60d0ee69088b52d54c0e53a32ed7025dc6452050e2a1b" Dec 04 15:58:36 crc kubenswrapper[4946]: I1204 15:58:36.171828 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-g4h2m"] Dec 04 15:58:36 crc kubenswrapper[4946]: I1204 15:58:36.192403 4946 scope.go:117] "RemoveContainer" containerID="b3ab195111de5507b5611ce60a72666422d2b1be4841fb5a5350ee918f640890" Dec 04 15:58:36 crc kubenswrapper[4946]: I1204 15:58:36.202492 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-g4h2m"] Dec 04 15:58:37 crc kubenswrapper[4946]: I1204 15:58:37.465681 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6380379a-3b27-4d4a-a8b9-9b8c72dcde44" path="/var/lib/kubelet/pods/6380379a-3b27-4d4a-a8b9-9b8c72dcde44/volumes" Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.158445 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz"] Dec 04 16:00:00 crc kubenswrapper[4946]: E1204 16:00:00.159742 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6380379a-3b27-4d4a-a8b9-9b8c72dcde44" containerName="registry-server" Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.159760 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="6380379a-3b27-4d4a-a8b9-9b8c72dcde44" containerName="registry-server" Dec 04 16:00:00 crc kubenswrapper[4946]: E1204 16:00:00.159787 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6380379a-3b27-4d4a-a8b9-9b8c72dcde44" containerName="extract-content" Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.159794 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="6380379a-3b27-4d4a-a8b9-9b8c72dcde44" containerName="extract-content" Dec 04 16:00:00 crc kubenswrapper[4946]: E1204 16:00:00.159827 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6380379a-3b27-4d4a-a8b9-9b8c72dcde44" containerName="extract-utilities" Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.159837 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="6380379a-3b27-4d4a-a8b9-9b8c72dcde44" containerName="extract-utilities" Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.160185 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="6380379a-3b27-4d4a-a8b9-9b8c72dcde44" 
containerName="registry-server" Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.161181 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz" Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.164411 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.164707 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.171862 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz"] Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.236020 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e0652238-98f4-4734-8db8-7bf8f4f9f8da-config-volume\") pod \"collect-profiles-29414400-6b7fz\" (UID: \"e0652238-98f4-4734-8db8-7bf8f4f9f8da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz" Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.236108 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e0652238-98f4-4734-8db8-7bf8f4f9f8da-secret-volume\") pod \"collect-profiles-29414400-6b7fz\" (UID: \"e0652238-98f4-4734-8db8-7bf8f4f9f8da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz" Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.236263 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbkrj\" (UniqueName: \"kubernetes.io/projected/e0652238-98f4-4734-8db8-7bf8f4f9f8da-kube-api-access-tbkrj\") pod \"collect-profiles-29414400-6b7fz\" (UID: \"e0652238-98f4-4734-8db8-7bf8f4f9f8da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz" Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.338577 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e0652238-98f4-4734-8db8-7bf8f4f9f8da-config-volume\") pod \"collect-profiles-29414400-6b7fz\" (UID: \"e0652238-98f4-4734-8db8-7bf8f4f9f8da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz" Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.338671 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e0652238-98f4-4734-8db8-7bf8f4f9f8da-secret-volume\") pod \"collect-profiles-29414400-6b7fz\" (UID: \"e0652238-98f4-4734-8db8-7bf8f4f9f8da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz" Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.338764 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbkrj\" (UniqueName: \"kubernetes.io/projected/e0652238-98f4-4734-8db8-7bf8f4f9f8da-kube-api-access-tbkrj\") pod \"collect-profiles-29414400-6b7fz\" (UID: \"e0652238-98f4-4734-8db8-7bf8f4f9f8da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz" Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.339984 4946 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e0652238-98f4-4734-8db8-7bf8f4f9f8da-config-volume\") pod \"collect-profiles-29414400-6b7fz\" (UID: \"e0652238-98f4-4734-8db8-7bf8f4f9f8da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz" Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.346413 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e0652238-98f4-4734-8db8-7bf8f4f9f8da-secret-volume\") pod \"collect-profiles-29414400-6b7fz\" (UID: \"e0652238-98f4-4734-8db8-7bf8f4f9f8da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz" Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.357898 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbkrj\" (UniqueName: \"kubernetes.io/projected/e0652238-98f4-4734-8db8-7bf8f4f9f8da-kube-api-access-tbkrj\") pod \"collect-profiles-29414400-6b7fz\" (UID: \"e0652238-98f4-4734-8db8-7bf8f4f9f8da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz" Dec 04 16:00:00 crc kubenswrapper[4946]: I1204 16:00:00.509479 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz" Dec 04 16:00:01 crc kubenswrapper[4946]: I1204 16:00:01.021823 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz"] Dec 04 16:00:01 crc kubenswrapper[4946]: I1204 16:00:01.602841 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz" event={"ID":"e0652238-98f4-4734-8db8-7bf8f4f9f8da","Type":"ContainerStarted","Data":"bfecd8a820c5388a9f98dac15ccf646bbc138b4711764bee197e9a85953deab3"} Dec 04 16:00:01 crc kubenswrapper[4946]: I1204 16:00:01.603212 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz" event={"ID":"e0652238-98f4-4734-8db8-7bf8f4f9f8da","Type":"ContainerStarted","Data":"a7afb78656794dc04c95d4db8209178da5e86c125cf0da623ab0a4f0990160e3"} Dec 04 16:00:01 crc kubenswrapper[4946]: I1204 16:00:01.630346 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz" podStartSLOduration=1.630326496 podStartE2EDuration="1.630326496s" podCreationTimestamp="2025-12-04 16:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 16:00:01.62740038 +0000 UTC m=+3452.513444021" watchObservedRunningTime="2025-12-04 16:00:01.630326496 +0000 UTC m=+3452.516370137" Dec 04 16:00:02 crc kubenswrapper[4946]: I1204 16:00:02.617002 4946 generic.go:334] "Generic (PLEG): container finished" podID="e0652238-98f4-4734-8db8-7bf8f4f9f8da" containerID="bfecd8a820c5388a9f98dac15ccf646bbc138b4711764bee197e9a85953deab3" exitCode=0 Dec 04 16:00:02 crc kubenswrapper[4946]: I1204 16:00:02.617189 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz" event={"ID":"e0652238-98f4-4734-8db8-7bf8f4f9f8da","Type":"ContainerDied","Data":"bfecd8a820c5388a9f98dac15ccf646bbc138b4711764bee197e9a85953deab3"} Dec 04 16:00:04 crc kubenswrapper[4946]: I1204 16:00:04.261364 4946 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz" Dec 04 16:00:04 crc kubenswrapper[4946]: I1204 16:00:04.457351 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e0652238-98f4-4734-8db8-7bf8f4f9f8da-secret-volume\") pod \"e0652238-98f4-4734-8db8-7bf8f4f9f8da\" (UID: \"e0652238-98f4-4734-8db8-7bf8f4f9f8da\") " Dec 04 16:00:04 crc kubenswrapper[4946]: I1204 16:00:04.457424 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbkrj\" (UniqueName: \"kubernetes.io/projected/e0652238-98f4-4734-8db8-7bf8f4f9f8da-kube-api-access-tbkrj\") pod \"e0652238-98f4-4734-8db8-7bf8f4f9f8da\" (UID: \"e0652238-98f4-4734-8db8-7bf8f4f9f8da\") " Dec 04 16:00:04 crc kubenswrapper[4946]: I1204 16:00:04.457568 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e0652238-98f4-4734-8db8-7bf8f4f9f8da-config-volume\") pod \"e0652238-98f4-4734-8db8-7bf8f4f9f8da\" (UID: \"e0652238-98f4-4734-8db8-7bf8f4f9f8da\") " Dec 04 16:00:04 crc kubenswrapper[4946]: I1204 16:00:04.458614 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0652238-98f4-4734-8db8-7bf8f4f9f8da-config-volume" (OuterVolumeSpecName: "config-volume") pod "e0652238-98f4-4734-8db8-7bf8f4f9f8da" (UID: "e0652238-98f4-4734-8db8-7bf8f4f9f8da"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 16:00:04 crc kubenswrapper[4946]: I1204 16:00:04.466052 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0652238-98f4-4734-8db8-7bf8f4f9f8da-kube-api-access-tbkrj" (OuterVolumeSpecName: "kube-api-access-tbkrj") pod "e0652238-98f4-4734-8db8-7bf8f4f9f8da" (UID: "e0652238-98f4-4734-8db8-7bf8f4f9f8da"). InnerVolumeSpecName "kube-api-access-tbkrj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 16:00:04 crc kubenswrapper[4946]: I1204 16:00:04.466143 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0652238-98f4-4734-8db8-7bf8f4f9f8da-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e0652238-98f4-4734-8db8-7bf8f4f9f8da" (UID: "e0652238-98f4-4734-8db8-7bf8f4f9f8da"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 16:00:04 crc kubenswrapper[4946]: I1204 16:00:04.563036 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbkrj\" (UniqueName: \"kubernetes.io/projected/e0652238-98f4-4734-8db8-7bf8f4f9f8da-kube-api-access-tbkrj\") on node \"crc\" DevicePath \"\"" Dec 04 16:00:04 crc kubenswrapper[4946]: I1204 16:00:04.563087 4946 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e0652238-98f4-4734-8db8-7bf8f4f9f8da-config-volume\") on node \"crc\" DevicePath \"\"" Dec 04 16:00:04 crc kubenswrapper[4946]: I1204 16:00:04.563100 4946 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e0652238-98f4-4734-8db8-7bf8f4f9f8da-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 04 16:00:04 crc kubenswrapper[4946]: I1204 16:00:04.638097 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz" event={"ID":"e0652238-98f4-4734-8db8-7bf8f4f9f8da","Type":"ContainerDied","Data":"a7afb78656794dc04c95d4db8209178da5e86c125cf0da623ab0a4f0990160e3"} Dec 04 16:00:04 crc kubenswrapper[4946]: I1204 16:00:04.638164 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a7afb78656794dc04c95d4db8209178da5e86c125cf0da623ab0a4f0990160e3" Dec 04 16:00:04 crc kubenswrapper[4946]: I1204 16:00:04.638221 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414400-6b7fz" Dec 04 16:00:04 crc kubenswrapper[4946]: I1204 16:00:04.706655 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj"] Dec 04 16:00:04 crc kubenswrapper[4946]: I1204 16:00:04.717835 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414355-pvrlj"] Dec 04 16:00:05 crc kubenswrapper[4946]: I1204 16:00:05.470802 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f13c9492-c978-4a41-976b-080495c9d5fc" path="/var/lib/kubelet/pods/f13c9492-c978-4a41-976b-080495c9d5fc/volumes" Dec 04 16:00:22 crc kubenswrapper[4946]: I1204 16:00:22.478315 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 16:00:22 crc kubenswrapper[4946]: I1204 16:00:22.479437 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 16:00:46 crc kubenswrapper[4946]: I1204 16:00:46.281052 4946 scope.go:117] "RemoveContainer" containerID="7ab184c4cdd2a18d1a8c4436db03e638f01954927c3697df17d235ced7c6e270" Dec 04 16:00:52 crc kubenswrapper[4946]: I1204 16:00:52.490976 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Dec 04 16:00:52 crc kubenswrapper[4946]: I1204 16:00:52.491820 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 16:01:00 crc kubenswrapper[4946]: I1204 16:01:00.163894 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29414401-bnsr9"] Dec 04 16:01:00 crc kubenswrapper[4946]: E1204 16:01:00.165772 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0652238-98f4-4734-8db8-7bf8f4f9f8da" containerName="collect-profiles" Dec 04 16:01:00 crc kubenswrapper[4946]: I1204 16:01:00.165788 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0652238-98f4-4734-8db8-7bf8f4f9f8da" containerName="collect-profiles" Dec 04 16:01:00 crc kubenswrapper[4946]: I1204 16:01:00.166247 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0652238-98f4-4734-8db8-7bf8f4f9f8da" containerName="collect-profiles" Dec 04 16:01:00 crc kubenswrapper[4946]: I1204 16:01:00.167407 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29414401-bnsr9" Dec 04 16:01:00 crc kubenswrapper[4946]: I1204 16:01:00.177190 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29414401-bnsr9"] Dec 04 16:01:00 crc kubenswrapper[4946]: I1204 16:01:00.234876 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89wjj\" (UniqueName: \"kubernetes.io/projected/ea23c958-243b-479d-a5f3-83e729f96b17-kube-api-access-89wjj\") pod \"keystone-cron-29414401-bnsr9\" (UID: \"ea23c958-243b-479d-a5f3-83e729f96b17\") " pod="openstack/keystone-cron-29414401-bnsr9" Dec 04 16:01:00 crc kubenswrapper[4946]: I1204 16:01:00.234964 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea23c958-243b-479d-a5f3-83e729f96b17-combined-ca-bundle\") pod \"keystone-cron-29414401-bnsr9\" (UID: \"ea23c958-243b-479d-a5f3-83e729f96b17\") " pod="openstack/keystone-cron-29414401-bnsr9" Dec 04 16:01:00 crc kubenswrapper[4946]: I1204 16:01:00.235040 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ea23c958-243b-479d-a5f3-83e729f96b17-fernet-keys\") pod \"keystone-cron-29414401-bnsr9\" (UID: \"ea23c958-243b-479d-a5f3-83e729f96b17\") " pod="openstack/keystone-cron-29414401-bnsr9" Dec 04 16:01:00 crc kubenswrapper[4946]: I1204 16:01:00.235288 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea23c958-243b-479d-a5f3-83e729f96b17-config-data\") pod \"keystone-cron-29414401-bnsr9\" (UID: \"ea23c958-243b-479d-a5f3-83e729f96b17\") " pod="openstack/keystone-cron-29414401-bnsr9" Dec 04 16:01:00 crc kubenswrapper[4946]: I1204 16:01:00.337104 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea23c958-243b-479d-a5f3-83e729f96b17-config-data\") pod \"keystone-cron-29414401-bnsr9\" (UID: \"ea23c958-243b-479d-a5f3-83e729f96b17\") " pod="openstack/keystone-cron-29414401-bnsr9" Dec 04 16:01:00 crc 
kubenswrapper[4946]: I1204 16:01:00.337234 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89wjj\" (UniqueName: \"kubernetes.io/projected/ea23c958-243b-479d-a5f3-83e729f96b17-kube-api-access-89wjj\") pod \"keystone-cron-29414401-bnsr9\" (UID: \"ea23c958-243b-479d-a5f3-83e729f96b17\") " pod="openstack/keystone-cron-29414401-bnsr9" Dec 04 16:01:00 crc kubenswrapper[4946]: I1204 16:01:00.337272 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea23c958-243b-479d-a5f3-83e729f96b17-combined-ca-bundle\") pod \"keystone-cron-29414401-bnsr9\" (UID: \"ea23c958-243b-479d-a5f3-83e729f96b17\") " pod="openstack/keystone-cron-29414401-bnsr9" Dec 04 16:01:00 crc kubenswrapper[4946]: I1204 16:01:00.337325 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ea23c958-243b-479d-a5f3-83e729f96b17-fernet-keys\") pod \"keystone-cron-29414401-bnsr9\" (UID: \"ea23c958-243b-479d-a5f3-83e729f96b17\") " pod="openstack/keystone-cron-29414401-bnsr9" Dec 04 16:01:00 crc kubenswrapper[4946]: I1204 16:01:00.348364 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea23c958-243b-479d-a5f3-83e729f96b17-config-data\") pod \"keystone-cron-29414401-bnsr9\" (UID: \"ea23c958-243b-479d-a5f3-83e729f96b17\") " pod="openstack/keystone-cron-29414401-bnsr9" Dec 04 16:01:00 crc kubenswrapper[4946]: I1204 16:01:00.348362 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ea23c958-243b-479d-a5f3-83e729f96b17-fernet-keys\") pod \"keystone-cron-29414401-bnsr9\" (UID: \"ea23c958-243b-479d-a5f3-83e729f96b17\") " pod="openstack/keystone-cron-29414401-bnsr9" Dec 04 16:01:00 crc kubenswrapper[4946]: I1204 16:01:00.351165 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea23c958-243b-479d-a5f3-83e729f96b17-combined-ca-bundle\") pod \"keystone-cron-29414401-bnsr9\" (UID: \"ea23c958-243b-479d-a5f3-83e729f96b17\") " pod="openstack/keystone-cron-29414401-bnsr9" Dec 04 16:01:00 crc kubenswrapper[4946]: I1204 16:01:00.359197 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89wjj\" (UniqueName: \"kubernetes.io/projected/ea23c958-243b-479d-a5f3-83e729f96b17-kube-api-access-89wjj\") pod \"keystone-cron-29414401-bnsr9\" (UID: \"ea23c958-243b-479d-a5f3-83e729f96b17\") " pod="openstack/keystone-cron-29414401-bnsr9" Dec 04 16:01:00 crc kubenswrapper[4946]: I1204 16:01:00.525398 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29414401-bnsr9" Dec 04 16:01:01 crc kubenswrapper[4946]: I1204 16:01:01.002186 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29414401-bnsr9"] Dec 04 16:01:01 crc kubenswrapper[4946]: I1204 16:01:01.262736 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29414401-bnsr9" event={"ID":"ea23c958-243b-479d-a5f3-83e729f96b17","Type":"ContainerStarted","Data":"4a01ddd27ed8370d899cef37569711e0eb03ed5e33e58de366dab6d4cf024ce4"} Dec 04 16:01:03 crc kubenswrapper[4946]: I1204 16:01:03.295606 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29414401-bnsr9" event={"ID":"ea23c958-243b-479d-a5f3-83e729f96b17","Type":"ContainerStarted","Data":"1088d7fb72cc08ed234d85c185366fbea7585e57fb79eac7d6c222b50778604d"} Dec 04 16:01:03 crc kubenswrapper[4946]: I1204 16:01:03.323003 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29414401-bnsr9" podStartSLOduration=3.322984893 podStartE2EDuration="3.322984893s" podCreationTimestamp="2025-12-04 16:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 16:01:03.317695348 +0000 UTC m=+3514.203738999" watchObservedRunningTime="2025-12-04 16:01:03.322984893 +0000 UTC m=+3514.209028534" Dec 04 16:01:05 crc kubenswrapper[4946]: I1204 16:01:05.317871 4946 generic.go:334] "Generic (PLEG): container finished" podID="ea23c958-243b-479d-a5f3-83e729f96b17" containerID="1088d7fb72cc08ed234d85c185366fbea7585e57fb79eac7d6c222b50778604d" exitCode=0 Dec 04 16:01:05 crc kubenswrapper[4946]: I1204 16:01:05.317964 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29414401-bnsr9" event={"ID":"ea23c958-243b-479d-a5f3-83e729f96b17","Type":"ContainerDied","Data":"1088d7fb72cc08ed234d85c185366fbea7585e57fb79eac7d6c222b50778604d"} Dec 04 16:01:07 crc kubenswrapper[4946]: I1204 16:01:07.103608 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29414401-bnsr9" Dec 04 16:01:07 crc kubenswrapper[4946]: I1204 16:01:07.239630 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea23c958-243b-479d-a5f3-83e729f96b17-combined-ca-bundle\") pod \"ea23c958-243b-479d-a5f3-83e729f96b17\" (UID: \"ea23c958-243b-479d-a5f3-83e729f96b17\") " Dec 04 16:01:07 crc kubenswrapper[4946]: I1204 16:01:07.239836 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89wjj\" (UniqueName: \"kubernetes.io/projected/ea23c958-243b-479d-a5f3-83e729f96b17-kube-api-access-89wjj\") pod \"ea23c958-243b-479d-a5f3-83e729f96b17\" (UID: \"ea23c958-243b-479d-a5f3-83e729f96b17\") " Dec 04 16:01:07 crc kubenswrapper[4946]: I1204 16:01:07.240035 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea23c958-243b-479d-a5f3-83e729f96b17-config-data\") pod \"ea23c958-243b-479d-a5f3-83e729f96b17\" (UID: \"ea23c958-243b-479d-a5f3-83e729f96b17\") " Dec 04 16:01:07 crc kubenswrapper[4946]: I1204 16:01:07.240089 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ea23c958-243b-479d-a5f3-83e729f96b17-fernet-keys\") pod \"ea23c958-243b-479d-a5f3-83e729f96b17\" (UID: \"ea23c958-243b-479d-a5f3-83e729f96b17\") " Dec 04 16:01:07 crc kubenswrapper[4946]: I1204 16:01:07.248083 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea23c958-243b-479d-a5f3-83e729f96b17-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ea23c958-243b-479d-a5f3-83e729f96b17" (UID: "ea23c958-243b-479d-a5f3-83e729f96b17"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 16:01:07 crc kubenswrapper[4946]: I1204 16:01:07.248274 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea23c958-243b-479d-a5f3-83e729f96b17-kube-api-access-89wjj" (OuterVolumeSpecName: "kube-api-access-89wjj") pod "ea23c958-243b-479d-a5f3-83e729f96b17" (UID: "ea23c958-243b-479d-a5f3-83e729f96b17"). InnerVolumeSpecName "kube-api-access-89wjj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 16:01:07 crc kubenswrapper[4946]: I1204 16:01:07.317092 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea23c958-243b-479d-a5f3-83e729f96b17-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ea23c958-243b-479d-a5f3-83e729f96b17" (UID: "ea23c958-243b-479d-a5f3-83e729f96b17"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 16:01:07 crc kubenswrapper[4946]: I1204 16:01:07.329955 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea23c958-243b-479d-a5f3-83e729f96b17-config-data" (OuterVolumeSpecName: "config-data") pod "ea23c958-243b-479d-a5f3-83e729f96b17" (UID: "ea23c958-243b-479d-a5f3-83e729f96b17"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 16:01:07 crc kubenswrapper[4946]: I1204 16:01:07.339634 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29414401-bnsr9" event={"ID":"ea23c958-243b-479d-a5f3-83e729f96b17","Type":"ContainerDied","Data":"4a01ddd27ed8370d899cef37569711e0eb03ed5e33e58de366dab6d4cf024ce4"} Dec 04 16:01:07 crc kubenswrapper[4946]: I1204 16:01:07.339686 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a01ddd27ed8370d899cef37569711e0eb03ed5e33e58de366dab6d4cf024ce4" Dec 04 16:01:07 crc kubenswrapper[4946]: I1204 16:01:07.339730 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29414401-bnsr9" Dec 04 16:01:07 crc kubenswrapper[4946]: I1204 16:01:07.343323 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89wjj\" (UniqueName: \"kubernetes.io/projected/ea23c958-243b-479d-a5f3-83e729f96b17-kube-api-access-89wjj\") on node \"crc\" DevicePath \"\"" Dec 04 16:01:07 crc kubenswrapper[4946]: I1204 16:01:07.343349 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea23c958-243b-479d-a5f3-83e729f96b17-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 16:01:07 crc kubenswrapper[4946]: I1204 16:01:07.343359 4946 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ea23c958-243b-479d-a5f3-83e729f96b17-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 04 16:01:07 crc kubenswrapper[4946]: I1204 16:01:07.343370 4946 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea23c958-243b-479d-a5f3-83e729f96b17-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 16:01:22 crc kubenswrapper[4946]: I1204 16:01:22.478800 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 16:01:22 crc kubenswrapper[4946]: I1204 16:01:22.479719 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 16:01:22 crc kubenswrapper[4946]: I1204 16:01:22.479770 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 16:01:22 crc kubenswrapper[4946]: I1204 16:01:22.481189 4946 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d0a1437bd15130925a740c259ecdc4342e886fbcfecf8430fa402c5d9360e53c"} pod="openshift-machine-config-operator/machine-config-daemon-qhv79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 16:01:22 crc kubenswrapper[4946]: I1204 16:01:22.481249 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" 
containerID="cri-o://d0a1437bd15130925a740c259ecdc4342e886fbcfecf8430fa402c5d9360e53c" gracePeriod=600 Dec 04 16:01:23 crc kubenswrapper[4946]: I1204 16:01:23.508857 4946 generic.go:334] "Generic (PLEG): container finished" podID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerID="d0a1437bd15130925a740c259ecdc4342e886fbcfecf8430fa402c5d9360e53c" exitCode=0 Dec 04 16:01:23 crc kubenswrapper[4946]: I1204 16:01:23.508940 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerDied","Data":"d0a1437bd15130925a740c259ecdc4342e886fbcfecf8430fa402c5d9360e53c"} Dec 04 16:01:23 crc kubenswrapper[4946]: I1204 16:01:23.509687 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0"} Dec 04 16:01:23 crc kubenswrapper[4946]: I1204 16:01:23.509720 4946 scope.go:117] "RemoveContainer" containerID="aa826c144810ea788ff4d526b4c64502473d581dd511b996d57853749b073afb" Dec 04 16:01:43 crc kubenswrapper[4946]: I1204 16:01:43.756525 4946 generic.go:334] "Generic (PLEG): container finished" podID="3ca94ef1-1df3-4925-9335-b30db3fbffb9" containerID="1adf249aa994d906def8c6ca2f57ae1760dd976743cff928e75c4e403b164b77" exitCode=0 Dec 04 16:01:43 crc kubenswrapper[4946]: I1204 16:01:43.756636 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"3ca94ef1-1df3-4925-9335-b30db3fbffb9","Type":"ContainerDied","Data":"1adf249aa994d906def8c6ca2f57ae1760dd976743cff928e75c4e403b164b77"} Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.504882 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.628496 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3ca94ef1-1df3-4925-9335-b30db3fbffb9-ssh-key\") pod \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.628676 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3ca94ef1-1df3-4925-9335-b30db3fbffb9-openstack-config-secret\") pod \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.628733 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/3ca94ef1-1df3-4925-9335-b30db3fbffb9-test-operator-ephemeral-temporary\") pod \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.628815 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/3ca94ef1-1df3-4925-9335-b30db3fbffb9-ca-certs\") pod \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.628939 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.628979 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/3ca94ef1-1df3-4925-9335-b30db3fbffb9-test-operator-ephemeral-workdir\") pod \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.629016 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3ca94ef1-1df3-4925-9335-b30db3fbffb9-openstack-config\") pod \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.629050 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-csh6t\" (UniqueName: \"kubernetes.io/projected/3ca94ef1-1df3-4925-9335-b30db3fbffb9-kube-api-access-csh6t\") pod \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.629095 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3ca94ef1-1df3-4925-9335-b30db3fbffb9-config-data\") pod \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\" (UID: \"3ca94ef1-1df3-4925-9335-b30db3fbffb9\") " Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.629739 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ca94ef1-1df3-4925-9335-b30db3fbffb9-test-operator-ephemeral-temporary" (OuterVolumeSpecName: 
"test-operator-ephemeral-temporary") pod "3ca94ef1-1df3-4925-9335-b30db3fbffb9" (UID: "3ca94ef1-1df3-4925-9335-b30db3fbffb9"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.630806 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ca94ef1-1df3-4925-9335-b30db3fbffb9-config-data" (OuterVolumeSpecName: "config-data") pod "3ca94ef1-1df3-4925-9335-b30db3fbffb9" (UID: "3ca94ef1-1df3-4925-9335-b30db3fbffb9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.636380 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ca94ef1-1df3-4925-9335-b30db3fbffb9-kube-api-access-csh6t" (OuterVolumeSpecName: "kube-api-access-csh6t") pod "3ca94ef1-1df3-4925-9335-b30db3fbffb9" (UID: "3ca94ef1-1df3-4925-9335-b30db3fbffb9"). InnerVolumeSpecName "kube-api-access-csh6t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.636994 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "test-operator-logs") pod "3ca94ef1-1df3-4925-9335-b30db3fbffb9" (UID: "3ca94ef1-1df3-4925-9335-b30db3fbffb9"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.669537 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ca94ef1-1df3-4925-9335-b30db3fbffb9-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "3ca94ef1-1df3-4925-9335-b30db3fbffb9" (UID: "3ca94ef1-1df3-4925-9335-b30db3fbffb9"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.687761 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ca94ef1-1df3-4925-9335-b30db3fbffb9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3ca94ef1-1df3-4925-9335-b30db3fbffb9" (UID: "3ca94ef1-1df3-4925-9335-b30db3fbffb9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.703519 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ca94ef1-1df3-4925-9335-b30db3fbffb9-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "3ca94ef1-1df3-4925-9335-b30db3fbffb9" (UID: "3ca94ef1-1df3-4925-9335-b30db3fbffb9"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.712982 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ca94ef1-1df3-4925-9335-b30db3fbffb9-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "3ca94ef1-1df3-4925-9335-b30db3fbffb9" (UID: "3ca94ef1-1df3-4925-9335-b30db3fbffb9"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.733258 4946 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.733344 4946 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3ca94ef1-1df3-4925-9335-b30db3fbffb9-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.733361 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-csh6t\" (UniqueName: \"kubernetes.io/projected/3ca94ef1-1df3-4925-9335-b30db3fbffb9-kube-api-access-csh6t\") on node \"crc\" DevicePath \"\"" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.733406 4946 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3ca94ef1-1df3-4925-9335-b30db3fbffb9-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.733419 4946 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3ca94ef1-1df3-4925-9335-b30db3fbffb9-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.733439 4946 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3ca94ef1-1df3-4925-9335-b30db3fbffb9-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.733460 4946 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/3ca94ef1-1df3-4925-9335-b30db3fbffb9-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.733473 4946 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/3ca94ef1-1df3-4925-9335-b30db3fbffb9-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.755869 4946 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.780487 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"3ca94ef1-1df3-4925-9335-b30db3fbffb9","Type":"ContainerDied","Data":"09d669eb4ca917cc46ada16056dcd9920cf31c0b0d8c388d3461a2fd08b82fe2"} Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.780530 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.780544 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09d669eb4ca917cc46ada16056dcd9920cf31c0b0d8c388d3461a2fd08b82fe2" Dec 04 16:01:45 crc kubenswrapper[4946]: I1204 16:01:45.836539 4946 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 04 16:01:46 crc kubenswrapper[4946]: I1204 16:01:46.119350 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ca94ef1-1df3-4925-9335-b30db3fbffb9-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "3ca94ef1-1df3-4925-9335-b30db3fbffb9" (UID: "3ca94ef1-1df3-4925-9335-b30db3fbffb9"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:01:46 crc kubenswrapper[4946]: I1204 16:01:46.143616 4946 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/3ca94ef1-1df3-4925-9335-b30db3fbffb9-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 04 16:01:50 crc kubenswrapper[4946]: I1204 16:01:50.920108 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 04 16:01:50 crc kubenswrapper[4946]: E1204 16:01:50.921719 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea23c958-243b-479d-a5f3-83e729f96b17" containerName="keystone-cron" Dec 04 16:01:50 crc kubenswrapper[4946]: I1204 16:01:50.921738 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea23c958-243b-479d-a5f3-83e729f96b17" containerName="keystone-cron" Dec 04 16:01:50 crc kubenswrapper[4946]: E1204 16:01:50.921772 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ca94ef1-1df3-4925-9335-b30db3fbffb9" containerName="tempest-tests-tempest-tests-runner" Dec 04 16:01:50 crc kubenswrapper[4946]: I1204 16:01:50.921781 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ca94ef1-1df3-4925-9335-b30db3fbffb9" containerName="tempest-tests-tempest-tests-runner" Dec 04 16:01:50 crc kubenswrapper[4946]: I1204 16:01:50.922078 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea23c958-243b-479d-a5f3-83e729f96b17" containerName="keystone-cron" Dec 04 16:01:50 crc kubenswrapper[4946]: I1204 16:01:50.922097 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ca94ef1-1df3-4925-9335-b30db3fbffb9" containerName="tempest-tests-tempest-tests-runner" Dec 04 16:01:50 crc kubenswrapper[4946]: I1204 16:01:50.923629 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 04 16:01:50 crc kubenswrapper[4946]: I1204 16:01:50.926787 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-7tgpz" Dec 04 16:01:50 crc kubenswrapper[4946]: I1204 16:01:50.934886 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 04 16:01:51 crc kubenswrapper[4946]: I1204 16:01:51.080818 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"58d92c3f-b05e-47c1-89f7-55d7c3686966\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 04 16:01:51 crc kubenswrapper[4946]: I1204 16:01:51.081328 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qp7px\" (UniqueName: \"kubernetes.io/projected/58d92c3f-b05e-47c1-89f7-55d7c3686966-kube-api-access-qp7px\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"58d92c3f-b05e-47c1-89f7-55d7c3686966\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 04 16:01:51 crc kubenswrapper[4946]: I1204 16:01:51.184565 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"58d92c3f-b05e-47c1-89f7-55d7c3686966\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 04 16:01:51 crc kubenswrapper[4946]: I1204 16:01:51.185100 4946 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"58d92c3f-b05e-47c1-89f7-55d7c3686966\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 04 16:01:51 crc kubenswrapper[4946]: I1204 16:01:51.185136 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qp7px\" (UniqueName: \"kubernetes.io/projected/58d92c3f-b05e-47c1-89f7-55d7c3686966-kube-api-access-qp7px\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"58d92c3f-b05e-47c1-89f7-55d7c3686966\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 04 16:01:51 crc kubenswrapper[4946]: I1204 16:01:51.207923 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qp7px\" (UniqueName: \"kubernetes.io/projected/58d92c3f-b05e-47c1-89f7-55d7c3686966-kube-api-access-qp7px\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"58d92c3f-b05e-47c1-89f7-55d7c3686966\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 04 16:01:51 crc kubenswrapper[4946]: I1204 16:01:51.234225 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"58d92c3f-b05e-47c1-89f7-55d7c3686966\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 04 16:01:51 crc 
kubenswrapper[4946]: I1204 16:01:51.250154 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 04 16:01:51 crc kubenswrapper[4946]: I1204 16:01:51.774667 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 04 16:01:51 crc kubenswrapper[4946]: I1204 16:01:51.778808 4946 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 16:01:51 crc kubenswrapper[4946]: I1204 16:01:51.864083 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"58d92c3f-b05e-47c1-89f7-55d7c3686966","Type":"ContainerStarted","Data":"23578f538521bcea4131cef7163b085f67508056246ab8f02e189a0de9ccd3ab"} Dec 04 16:01:52 crc kubenswrapper[4946]: I1204 16:01:52.875338 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"58d92c3f-b05e-47c1-89f7-55d7c3686966","Type":"ContainerStarted","Data":"88086947baca90b8b053bfa91364aaf9df388f2f1e2c4d560cc0246e04024e7f"} Dec 04 16:01:52 crc kubenswrapper[4946]: I1204 16:01:52.897394 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.074094163 podStartE2EDuration="2.897372976s" podCreationTimestamp="2025-12-04 16:01:50 +0000 UTC" firstStartedPulling="2025-12-04 16:01:51.778553769 +0000 UTC m=+3562.664597410" lastFinishedPulling="2025-12-04 16:01:52.601832582 +0000 UTC m=+3563.487876223" observedRunningTime="2025-12-04 16:01:52.887029724 +0000 UTC m=+3563.773073375" watchObservedRunningTime="2025-12-04 16:01:52.897372976 +0000 UTC m=+3563.783416617" Dec 04 16:02:11 crc kubenswrapper[4946]: I1204 16:02:11.720876 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-djt66"] Dec 04 16:02:11 crc kubenswrapper[4946]: I1204 16:02:11.727694 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-djt66" Dec 04 16:02:11 crc kubenswrapper[4946]: I1204 16:02:11.743643 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-djt66"] Dec 04 16:02:11 crc kubenswrapper[4946]: I1204 16:02:11.838326 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c00f6e37-9ae9-4b26-8efe-237923f4dbae-utilities\") pod \"community-operators-djt66\" (UID: \"c00f6e37-9ae9-4b26-8efe-237923f4dbae\") " pod="openshift-marketplace/community-operators-djt66" Dec 04 16:02:11 crc kubenswrapper[4946]: I1204 16:02:11.838563 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zm24k\" (UniqueName: \"kubernetes.io/projected/c00f6e37-9ae9-4b26-8efe-237923f4dbae-kube-api-access-zm24k\") pod \"community-operators-djt66\" (UID: \"c00f6e37-9ae9-4b26-8efe-237923f4dbae\") " pod="openshift-marketplace/community-operators-djt66" Dec 04 16:02:11 crc kubenswrapper[4946]: I1204 16:02:11.838802 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c00f6e37-9ae9-4b26-8efe-237923f4dbae-catalog-content\") pod \"community-operators-djt66\" (UID: \"c00f6e37-9ae9-4b26-8efe-237923f4dbae\") " pod="openshift-marketplace/community-operators-djt66" Dec 04 16:02:11 crc kubenswrapper[4946]: I1204 16:02:11.909689 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ff888"] Dec 04 16:02:11 crc kubenswrapper[4946]: I1204 16:02:11.913286 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ff888" Dec 04 16:02:11 crc kubenswrapper[4946]: I1204 16:02:11.923575 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ff888"] Dec 04 16:02:11 crc kubenswrapper[4946]: I1204 16:02:11.943612 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zm24k\" (UniqueName: \"kubernetes.io/projected/c00f6e37-9ae9-4b26-8efe-237923f4dbae-kube-api-access-zm24k\") pod \"community-operators-djt66\" (UID: \"c00f6e37-9ae9-4b26-8efe-237923f4dbae\") " pod="openshift-marketplace/community-operators-djt66" Dec 04 16:02:11 crc kubenswrapper[4946]: I1204 16:02:11.943748 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hflcw\" (UniqueName: \"kubernetes.io/projected/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2-kube-api-access-hflcw\") pod \"certified-operators-ff888\" (UID: \"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2\") " pod="openshift-marketplace/certified-operators-ff888" Dec 04 16:02:11 crc kubenswrapper[4946]: I1204 16:02:11.943798 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c00f6e37-9ae9-4b26-8efe-237923f4dbae-catalog-content\") pod \"community-operators-djt66\" (UID: \"c00f6e37-9ae9-4b26-8efe-237923f4dbae\") " pod="openshift-marketplace/community-operators-djt66" Dec 04 16:02:11 crc kubenswrapper[4946]: I1204 16:02:11.943946 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2-utilities\") pod \"certified-operators-ff888\" (UID: \"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2\") " pod="openshift-marketplace/certified-operators-ff888" Dec 04 16:02:11 crc kubenswrapper[4946]: I1204 16:02:11.944055 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2-catalog-content\") pod \"certified-operators-ff888\" (UID: \"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2\") " pod="openshift-marketplace/certified-operators-ff888" Dec 04 16:02:11 crc kubenswrapper[4946]: I1204 16:02:11.944117 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c00f6e37-9ae9-4b26-8efe-237923f4dbae-utilities\") pod \"community-operators-djt66\" (UID: \"c00f6e37-9ae9-4b26-8efe-237923f4dbae\") " pod="openshift-marketplace/community-operators-djt66" Dec 04 16:02:11 crc kubenswrapper[4946]: I1204 16:02:11.944834 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c00f6e37-9ae9-4b26-8efe-237923f4dbae-utilities\") pod \"community-operators-djt66\" (UID: \"c00f6e37-9ae9-4b26-8efe-237923f4dbae\") " pod="openshift-marketplace/community-operators-djt66" Dec 04 16:02:11 crc kubenswrapper[4946]: I1204 16:02:11.945422 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c00f6e37-9ae9-4b26-8efe-237923f4dbae-catalog-content\") pod \"community-operators-djt66\" (UID: \"c00f6e37-9ae9-4b26-8efe-237923f4dbae\") " pod="openshift-marketplace/community-operators-djt66" Dec 04 16:02:11 crc kubenswrapper[4946]: I1204 16:02:11.970885 4946 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zm24k\" (UniqueName: \"kubernetes.io/projected/c00f6e37-9ae9-4b26-8efe-237923f4dbae-kube-api-access-zm24k\") pod \"community-operators-djt66\" (UID: \"c00f6e37-9ae9-4b26-8efe-237923f4dbae\") " pod="openshift-marketplace/community-operators-djt66" Dec 04 16:02:12 crc kubenswrapper[4946]: I1204 16:02:12.046364 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2-catalog-content\") pod \"certified-operators-ff888\" (UID: \"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2\") " pod="openshift-marketplace/certified-operators-ff888" Dec 04 16:02:12 crc kubenswrapper[4946]: I1204 16:02:12.046537 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hflcw\" (UniqueName: \"kubernetes.io/projected/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2-kube-api-access-hflcw\") pod \"certified-operators-ff888\" (UID: \"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2\") " pod="openshift-marketplace/certified-operators-ff888" Dec 04 16:02:12 crc kubenswrapper[4946]: I1204 16:02:12.046625 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2-utilities\") pod \"certified-operators-ff888\" (UID: \"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2\") " pod="openshift-marketplace/certified-operators-ff888" Dec 04 16:02:12 crc kubenswrapper[4946]: I1204 16:02:12.047689 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2-utilities\") pod \"certified-operators-ff888\" (UID: \"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2\") " pod="openshift-marketplace/certified-operators-ff888" Dec 04 16:02:12 crc kubenswrapper[4946]: I1204 16:02:12.047976 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2-catalog-content\") pod \"certified-operators-ff888\" (UID: \"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2\") " pod="openshift-marketplace/certified-operators-ff888" Dec 04 16:02:12 crc kubenswrapper[4946]: I1204 16:02:12.072753 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hflcw\" (UniqueName: \"kubernetes.io/projected/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2-kube-api-access-hflcw\") pod \"certified-operators-ff888\" (UID: \"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2\") " pod="openshift-marketplace/certified-operators-ff888" Dec 04 16:02:12 crc kubenswrapper[4946]: I1204 16:02:12.082948 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-djt66" Dec 04 16:02:12 crc kubenswrapper[4946]: I1204 16:02:12.269249 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ff888" Dec 04 16:02:13 crc kubenswrapper[4946]: I1204 16:02:13.081710 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ff888"] Dec 04 16:02:13 crc kubenswrapper[4946]: I1204 16:02:13.188012 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ff888" event={"ID":"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2","Type":"ContainerStarted","Data":"7d2ff06b30c9112035a9b4e61f543ef0ffa6b7fa34ce986e1ac212503f83df97"} Dec 04 16:02:13 crc kubenswrapper[4946]: I1204 16:02:13.694911 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-djt66"] Dec 04 16:02:14 crc kubenswrapper[4946]: I1204 16:02:14.203261 4946 generic.go:334] "Generic (PLEG): container finished" podID="c00f6e37-9ae9-4b26-8efe-237923f4dbae" containerID="2d27abf9a2ff6857637b759cce4e64c0a7bbb386e254847b1b11d1f4e44960c0" exitCode=0 Dec 04 16:02:14 crc kubenswrapper[4946]: I1204 16:02:14.203404 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-djt66" event={"ID":"c00f6e37-9ae9-4b26-8efe-237923f4dbae","Type":"ContainerDied","Data":"2d27abf9a2ff6857637b759cce4e64c0a7bbb386e254847b1b11d1f4e44960c0"} Dec 04 16:02:14 crc kubenswrapper[4946]: I1204 16:02:14.203795 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-djt66" event={"ID":"c00f6e37-9ae9-4b26-8efe-237923f4dbae","Type":"ContainerStarted","Data":"91f4892ef8bfe5f0b95bb91ef549fca319c8becce5ad770956d79f13c51df6db"} Dec 04 16:02:14 crc kubenswrapper[4946]: I1204 16:02:14.207247 4946 generic.go:334] "Generic (PLEG): container finished" podID="a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2" containerID="db1f8970c6afc19be71b9bdca4f96e4f9ee8791721ac7a14263bf49c76f89d5c" exitCode=0 Dec 04 16:02:14 crc kubenswrapper[4946]: I1204 16:02:14.207319 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ff888" event={"ID":"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2","Type":"ContainerDied","Data":"db1f8970c6afc19be71b9bdca4f96e4f9ee8791721ac7a14263bf49c76f89d5c"} Dec 04 16:02:15 crc kubenswrapper[4946]: I1204 16:02:15.220011 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-djt66" event={"ID":"c00f6e37-9ae9-4b26-8efe-237923f4dbae","Type":"ContainerStarted","Data":"a0dd3d8e3d7d4c24c8f277b6a337930567595084def703e7d0404f8c51458b74"} Dec 04 16:02:15 crc kubenswrapper[4946]: I1204 16:02:15.222627 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ff888" event={"ID":"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2","Type":"ContainerStarted","Data":"d94208900bbdc1c8d570c0a10e43ed87eebb95c9ead871b6458e9105ab23c89c"} Dec 04 16:02:17 crc kubenswrapper[4946]: I1204 16:02:17.249422 4946 generic.go:334] "Generic (PLEG): container finished" podID="a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2" containerID="d94208900bbdc1c8d570c0a10e43ed87eebb95c9ead871b6458e9105ab23c89c" exitCode=0 Dec 04 16:02:17 crc kubenswrapper[4946]: I1204 16:02:17.249983 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ff888" event={"ID":"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2","Type":"ContainerDied","Data":"d94208900bbdc1c8d570c0a10e43ed87eebb95c9ead871b6458e9105ab23c89c"} Dec 04 16:02:17 crc kubenswrapper[4946]: I1204 16:02:17.257542 4946 generic.go:334] "Generic (PLEG): 
container finished" podID="c00f6e37-9ae9-4b26-8efe-237923f4dbae" containerID="a0dd3d8e3d7d4c24c8f277b6a337930567595084def703e7d0404f8c51458b74" exitCode=0 Dec 04 16:02:17 crc kubenswrapper[4946]: I1204 16:02:17.257604 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-djt66" event={"ID":"c00f6e37-9ae9-4b26-8efe-237923f4dbae","Type":"ContainerDied","Data":"a0dd3d8e3d7d4c24c8f277b6a337930567595084def703e7d0404f8c51458b74"} Dec 04 16:02:17 crc kubenswrapper[4946]: I1204 16:02:17.649519 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-8qm9f/must-gather-hqmjm"] Dec 04 16:02:17 crc kubenswrapper[4946]: I1204 16:02:17.651628 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8qm9f/must-gather-hqmjm" Dec 04 16:02:17 crc kubenswrapper[4946]: I1204 16:02:17.655846 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-8qm9f"/"kube-root-ca.crt" Dec 04 16:02:17 crc kubenswrapper[4946]: I1204 16:02:17.656270 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-8qm9f"/"openshift-service-ca.crt" Dec 04 16:02:17 crc kubenswrapper[4946]: I1204 16:02:17.658103 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-8qm9f"/"default-dockercfg-b8bbl" Dec 04 16:02:17 crc kubenswrapper[4946]: I1204 16:02:17.708213 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-8qm9f/must-gather-hqmjm"] Dec 04 16:02:17 crc kubenswrapper[4946]: I1204 16:02:17.768419 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/582095c0-2850-4d2e-869e-bb61c982f955-must-gather-output\") pod \"must-gather-hqmjm\" (UID: \"582095c0-2850-4d2e-869e-bb61c982f955\") " pod="openshift-must-gather-8qm9f/must-gather-hqmjm" Dec 04 16:02:17 crc kubenswrapper[4946]: I1204 16:02:17.768712 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptdmd\" (UniqueName: \"kubernetes.io/projected/582095c0-2850-4d2e-869e-bb61c982f955-kube-api-access-ptdmd\") pod \"must-gather-hqmjm\" (UID: \"582095c0-2850-4d2e-869e-bb61c982f955\") " pod="openshift-must-gather-8qm9f/must-gather-hqmjm" Dec 04 16:02:17 crc kubenswrapper[4946]: I1204 16:02:17.872021 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/582095c0-2850-4d2e-869e-bb61c982f955-must-gather-output\") pod \"must-gather-hqmjm\" (UID: \"582095c0-2850-4d2e-869e-bb61c982f955\") " pod="openshift-must-gather-8qm9f/must-gather-hqmjm" Dec 04 16:02:17 crc kubenswrapper[4946]: I1204 16:02:17.872882 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptdmd\" (UniqueName: \"kubernetes.io/projected/582095c0-2850-4d2e-869e-bb61c982f955-kube-api-access-ptdmd\") pod \"must-gather-hqmjm\" (UID: \"582095c0-2850-4d2e-869e-bb61c982f955\") " pod="openshift-must-gather-8qm9f/must-gather-hqmjm" Dec 04 16:02:17 crc kubenswrapper[4946]: I1204 16:02:17.874100 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/582095c0-2850-4d2e-869e-bb61c982f955-must-gather-output\") pod \"must-gather-hqmjm\" (UID: \"582095c0-2850-4d2e-869e-bb61c982f955\") " 
pod="openshift-must-gather-8qm9f/must-gather-hqmjm" Dec 04 16:02:17 crc kubenswrapper[4946]: I1204 16:02:17.911021 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptdmd\" (UniqueName: \"kubernetes.io/projected/582095c0-2850-4d2e-869e-bb61c982f955-kube-api-access-ptdmd\") pod \"must-gather-hqmjm\" (UID: \"582095c0-2850-4d2e-869e-bb61c982f955\") " pod="openshift-must-gather-8qm9f/must-gather-hqmjm" Dec 04 16:02:17 crc kubenswrapper[4946]: I1204 16:02:17.975639 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8qm9f/must-gather-hqmjm" Dec 04 16:02:19 crc kubenswrapper[4946]: I1204 16:02:19.447446 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-8qm9f/must-gather-hqmjm"] Dec 04 16:02:20 crc kubenswrapper[4946]: I1204 16:02:20.306804 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8qm9f/must-gather-hqmjm" event={"ID":"582095c0-2850-4d2e-869e-bb61c982f955","Type":"ContainerStarted","Data":"e6dab2a00cb0110e7241485a866eb33f314dc46c8187eb13cf51baea6c93cb3e"} Dec 04 16:02:20 crc kubenswrapper[4946]: I1204 16:02:20.309788 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-djt66" event={"ID":"c00f6e37-9ae9-4b26-8efe-237923f4dbae","Type":"ContainerStarted","Data":"98c0ff0a0c7c7f3f5b7e65416ce8639aa330d8a98a513ac6a4fa8b4e875da8a8"} Dec 04 16:02:23 crc kubenswrapper[4946]: I1204 16:02:23.352755 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ff888" event={"ID":"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2","Type":"ContainerStarted","Data":"909acaa1c966490476813a4ef0264373f6a888c0f6573c9dbf926ee50452c035"} Dec 04 16:02:23 crc kubenswrapper[4946]: I1204 16:02:23.387380 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-djt66" podStartSLOduration=8.863512944 podStartE2EDuration="12.38735921s" podCreationTimestamp="2025-12-04 16:02:11 +0000 UTC" firstStartedPulling="2025-12-04 16:02:14.206553161 +0000 UTC m=+3585.092596802" lastFinishedPulling="2025-12-04 16:02:17.730399427 +0000 UTC m=+3588.616443068" observedRunningTime="2025-12-04 16:02:23.375883638 +0000 UTC m=+3594.261927269" watchObservedRunningTime="2025-12-04 16:02:23.38735921 +0000 UTC m=+3594.273402851" Dec 04 16:02:23 crc kubenswrapper[4946]: I1204 16:02:23.398112 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ff888" podStartSLOduration=8.951877407 podStartE2EDuration="12.398091992s" podCreationTimestamp="2025-12-04 16:02:11 +0000 UTC" firstStartedPulling="2025-12-04 16:02:14.21033571 +0000 UTC m=+3585.096379381" lastFinishedPulling="2025-12-04 16:02:17.656550325 +0000 UTC m=+3588.542593966" observedRunningTime="2025-12-04 16:02:23.39647987 +0000 UTC m=+3594.282523521" watchObservedRunningTime="2025-12-04 16:02:23.398091992 +0000 UTC m=+3594.284135633" Dec 04 16:02:29 crc kubenswrapper[4946]: I1204 16:02:29.423463 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8qm9f/must-gather-hqmjm" event={"ID":"582095c0-2850-4d2e-869e-bb61c982f955","Type":"ContainerStarted","Data":"e2ca6b4b6330db67bb69a3669847f668b5afcc742ea45699e888acbcdd17cbf9"} Dec 04 16:02:30 crc kubenswrapper[4946]: I1204 16:02:30.434302 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8qm9f/must-gather-hqmjm" 
event={"ID":"582095c0-2850-4d2e-869e-bb61c982f955","Type":"ContainerStarted","Data":"306a76101335d5c82988ce9b793966d8beaf0c2bd1607be17304e637f7dc2984"} Dec 04 16:02:30 crc kubenswrapper[4946]: I1204 16:02:30.456990 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-8qm9f/must-gather-hqmjm" podStartSLOduration=4.560627324 podStartE2EDuration="13.456966149s" podCreationTimestamp="2025-12-04 16:02:17 +0000 UTC" firstStartedPulling="2025-12-04 16:02:19.456013696 +0000 UTC m=+3590.342057337" lastFinishedPulling="2025-12-04 16:02:28.352352521 +0000 UTC m=+3599.238396162" observedRunningTime="2025-12-04 16:02:30.449107962 +0000 UTC m=+3601.335151603" watchObservedRunningTime="2025-12-04 16:02:30.456966149 +0000 UTC m=+3601.343009790" Dec 04 16:02:32 crc kubenswrapper[4946]: I1204 16:02:32.085237 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-djt66" Dec 04 16:02:32 crc kubenswrapper[4946]: I1204 16:02:32.085849 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-djt66" Dec 04 16:02:32 crc kubenswrapper[4946]: I1204 16:02:32.151940 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-djt66" Dec 04 16:02:32 crc kubenswrapper[4946]: I1204 16:02:32.270799 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ff888" Dec 04 16:02:32 crc kubenswrapper[4946]: I1204 16:02:32.270942 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ff888" Dec 04 16:02:32 crc kubenswrapper[4946]: I1204 16:02:32.343130 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ff888" Dec 04 16:02:32 crc kubenswrapper[4946]: I1204 16:02:32.522460 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ff888" Dec 04 16:02:32 crc kubenswrapper[4946]: I1204 16:02:32.524999 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-djt66" Dec 04 16:02:33 crc kubenswrapper[4946]: I1204 16:02:33.216422 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-8qm9f/crc-debug-kdrsb"] Dec 04 16:02:33 crc kubenswrapper[4946]: I1204 16:02:33.218487 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8qm9f/crc-debug-kdrsb" Dec 04 16:02:33 crc kubenswrapper[4946]: I1204 16:02:33.359624 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6-host\") pod \"crc-debug-kdrsb\" (UID: \"b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6\") " pod="openshift-must-gather-8qm9f/crc-debug-kdrsb" Dec 04 16:02:33 crc kubenswrapper[4946]: I1204 16:02:33.359712 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kk2tn\" (UniqueName: \"kubernetes.io/projected/b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6-kube-api-access-kk2tn\") pod \"crc-debug-kdrsb\" (UID: \"b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6\") " pod="openshift-must-gather-8qm9f/crc-debug-kdrsb" Dec 04 16:02:33 crc kubenswrapper[4946]: I1204 16:02:33.462980 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kk2tn\" (UniqueName: \"kubernetes.io/projected/b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6-kube-api-access-kk2tn\") pod \"crc-debug-kdrsb\" (UID: \"b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6\") " pod="openshift-must-gather-8qm9f/crc-debug-kdrsb" Dec 04 16:02:33 crc kubenswrapper[4946]: I1204 16:02:33.463301 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6-host\") pod \"crc-debug-kdrsb\" (UID: \"b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6\") " pod="openshift-must-gather-8qm9f/crc-debug-kdrsb" Dec 04 16:02:33 crc kubenswrapper[4946]: I1204 16:02:33.463455 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6-host\") pod \"crc-debug-kdrsb\" (UID: \"b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6\") " pod="openshift-must-gather-8qm9f/crc-debug-kdrsb" Dec 04 16:02:33 crc kubenswrapper[4946]: I1204 16:02:33.483704 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kk2tn\" (UniqueName: \"kubernetes.io/projected/b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6-kube-api-access-kk2tn\") pod \"crc-debug-kdrsb\" (UID: \"b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6\") " pod="openshift-must-gather-8qm9f/crc-debug-kdrsb" Dec 04 16:02:33 crc kubenswrapper[4946]: I1204 16:02:33.540984 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8qm9f/crc-debug-kdrsb" Dec 04 16:02:33 crc kubenswrapper[4946]: W1204 16:02:33.598675 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb20312d5_29f8_4c19_a6dd_13d9ef5ffbe6.slice/crio-03f4070be78a71c00be2d312cb45b91ce0b2f9b996f8fe6404b1dff1ba82dab6 WatchSource:0}: Error finding container 03f4070be78a71c00be2d312cb45b91ce0b2f9b996f8fe6404b1dff1ba82dab6: Status 404 returned error can't find the container with id 03f4070be78a71c00be2d312cb45b91ce0b2f9b996f8fe6404b1dff1ba82dab6 Dec 04 16:02:34 crc kubenswrapper[4946]: I1204 16:02:34.199737 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ff888"] Dec 04 16:02:34 crc kubenswrapper[4946]: I1204 16:02:34.483174 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8qm9f/crc-debug-kdrsb" event={"ID":"b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6","Type":"ContainerStarted","Data":"03f4070be78a71c00be2d312cb45b91ce0b2f9b996f8fe6404b1dff1ba82dab6"} Dec 04 16:02:34 crc kubenswrapper[4946]: I1204 16:02:34.804321 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-djt66"] Dec 04 16:02:34 crc kubenswrapper[4946]: I1204 16:02:34.804952 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-djt66" podUID="c00f6e37-9ae9-4b26-8efe-237923f4dbae" containerName="registry-server" containerID="cri-o://98c0ff0a0c7c7f3f5b7e65416ce8639aa330d8a98a513ac6a4fa8b4e875da8a8" gracePeriod=2 Dec 04 16:02:35 crc kubenswrapper[4946]: I1204 16:02:35.506462 4946 generic.go:334] "Generic (PLEG): container finished" podID="c00f6e37-9ae9-4b26-8efe-237923f4dbae" containerID="98c0ff0a0c7c7f3f5b7e65416ce8639aa330d8a98a513ac6a4fa8b4e875da8a8" exitCode=0 Dec 04 16:02:35 crc kubenswrapper[4946]: I1204 16:02:35.507067 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-djt66" event={"ID":"c00f6e37-9ae9-4b26-8efe-237923f4dbae","Type":"ContainerDied","Data":"98c0ff0a0c7c7f3f5b7e65416ce8639aa330d8a98a513ac6a4fa8b4e875da8a8"} Dec 04 16:02:35 crc kubenswrapper[4946]: I1204 16:02:35.507492 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ff888" podUID="a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2" containerName="registry-server" containerID="cri-o://909acaa1c966490476813a4ef0264373f6a888c0f6573c9dbf926ee50452c035" gracePeriod=2 Dec 04 16:02:35 crc kubenswrapper[4946]: E1204 16:02:35.730706 4946 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda5a3c9c9_c514_458f_8ae0_c955ee7f2dd2.slice/crio-909acaa1c966490476813a4ef0264373f6a888c0f6573c9dbf926ee50452c035.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda5a3c9c9_c514_458f_8ae0_c955ee7f2dd2.slice/crio-conmon-909acaa1c966490476813a4ef0264373f6a888c0f6573c9dbf926ee50452c035.scope\": RecentStats: unable to find data in memory cache]" Dec 04 16:02:35 crc kubenswrapper[4946]: I1204 16:02:35.814748 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-djt66" Dec 04 16:02:35 crc kubenswrapper[4946]: I1204 16:02:35.835451 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zm24k\" (UniqueName: \"kubernetes.io/projected/c00f6e37-9ae9-4b26-8efe-237923f4dbae-kube-api-access-zm24k\") pod \"c00f6e37-9ae9-4b26-8efe-237923f4dbae\" (UID: \"c00f6e37-9ae9-4b26-8efe-237923f4dbae\") " Dec 04 16:02:35 crc kubenswrapper[4946]: I1204 16:02:35.835526 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c00f6e37-9ae9-4b26-8efe-237923f4dbae-utilities\") pod \"c00f6e37-9ae9-4b26-8efe-237923f4dbae\" (UID: \"c00f6e37-9ae9-4b26-8efe-237923f4dbae\") " Dec 04 16:02:35 crc kubenswrapper[4946]: I1204 16:02:35.835555 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c00f6e37-9ae9-4b26-8efe-237923f4dbae-catalog-content\") pod \"c00f6e37-9ae9-4b26-8efe-237923f4dbae\" (UID: \"c00f6e37-9ae9-4b26-8efe-237923f4dbae\") " Dec 04 16:02:35 crc kubenswrapper[4946]: I1204 16:02:35.837366 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c00f6e37-9ae9-4b26-8efe-237923f4dbae-utilities" (OuterVolumeSpecName: "utilities") pod "c00f6e37-9ae9-4b26-8efe-237923f4dbae" (UID: "c00f6e37-9ae9-4b26-8efe-237923f4dbae"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:02:35 crc kubenswrapper[4946]: I1204 16:02:35.853382 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c00f6e37-9ae9-4b26-8efe-237923f4dbae-kube-api-access-zm24k" (OuterVolumeSpecName: "kube-api-access-zm24k") pod "c00f6e37-9ae9-4b26-8efe-237923f4dbae" (UID: "c00f6e37-9ae9-4b26-8efe-237923f4dbae"). InnerVolumeSpecName "kube-api-access-zm24k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 16:02:35 crc kubenswrapper[4946]: I1204 16:02:35.924847 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c00f6e37-9ae9-4b26-8efe-237923f4dbae-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c00f6e37-9ae9-4b26-8efe-237923f4dbae" (UID: "c00f6e37-9ae9-4b26-8efe-237923f4dbae"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:02:35 crc kubenswrapper[4946]: I1204 16:02:35.939645 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zm24k\" (UniqueName: \"kubernetes.io/projected/c00f6e37-9ae9-4b26-8efe-237923f4dbae-kube-api-access-zm24k\") on node \"crc\" DevicePath \"\"" Dec 04 16:02:35 crc kubenswrapper[4946]: I1204 16:02:35.939693 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c00f6e37-9ae9-4b26-8efe-237923f4dbae-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 16:02:35 crc kubenswrapper[4946]: I1204 16:02:35.939704 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c00f6e37-9ae9-4b26-8efe-237923f4dbae-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.261930 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ff888" Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.350950 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2-catalog-content\") pod \"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2\" (UID: \"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2\") " Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.351077 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hflcw\" (UniqueName: \"kubernetes.io/projected/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2-kube-api-access-hflcw\") pod \"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2\" (UID: \"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2\") " Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.351237 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2-utilities\") pod \"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2\" (UID: \"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2\") " Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.351879 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2-utilities" (OuterVolumeSpecName: "utilities") pod "a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2" (UID: "a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.352295 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.356162 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2-kube-api-access-hflcw" (OuterVolumeSpecName: "kube-api-access-hflcw") pod "a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2" (UID: "a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2"). InnerVolumeSpecName "kube-api-access-hflcw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.402144 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2" (UID: "a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.454898 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hflcw\" (UniqueName: \"kubernetes.io/projected/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2-kube-api-access-hflcw\") on node \"crc\" DevicePath \"\"" Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.454939 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.525603 4946 generic.go:334] "Generic (PLEG): container finished" podID="a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2" containerID="909acaa1c966490476813a4ef0264373f6a888c0f6573c9dbf926ee50452c035" exitCode=0 Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.525816 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ff888" Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.525860 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ff888" event={"ID":"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2","Type":"ContainerDied","Data":"909acaa1c966490476813a4ef0264373f6a888c0f6573c9dbf926ee50452c035"} Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.527205 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ff888" event={"ID":"a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2","Type":"ContainerDied","Data":"7d2ff06b30c9112035a9b4e61f543ef0ffa6b7fa34ce986e1ac212503f83df97"} Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.527256 4946 scope.go:117] "RemoveContainer" containerID="909acaa1c966490476813a4ef0264373f6a888c0f6573c9dbf926ee50452c035" Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.536425 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-djt66" event={"ID":"c00f6e37-9ae9-4b26-8efe-237923f4dbae","Type":"ContainerDied","Data":"91f4892ef8bfe5f0b95bb91ef549fca319c8becce5ad770956d79f13c51df6db"} Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.536550 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-djt66" Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.576334 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ff888"] Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.580800 4946 scope.go:117] "RemoveContainer" containerID="d94208900bbdc1c8d570c0a10e43ed87eebb95c9ead871b6458e9105ab23c89c" Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.610336 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ff888"] Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.639636 4946 scope.go:117] "RemoveContainer" containerID="db1f8970c6afc19be71b9bdca4f96e4f9ee8791721ac7a14263bf49c76f89d5c" Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.645580 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-djt66"] Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.659997 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-djt66"] Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.716846 4946 scope.go:117] "RemoveContainer" containerID="909acaa1c966490476813a4ef0264373f6a888c0f6573c9dbf926ee50452c035" Dec 04 16:02:36 crc kubenswrapper[4946]: E1204 16:02:36.721396 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"909acaa1c966490476813a4ef0264373f6a888c0f6573c9dbf926ee50452c035\": container with ID starting with 909acaa1c966490476813a4ef0264373f6a888c0f6573c9dbf926ee50452c035 not found: ID does not exist" containerID="909acaa1c966490476813a4ef0264373f6a888c0f6573c9dbf926ee50452c035" Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.721457 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"909acaa1c966490476813a4ef0264373f6a888c0f6573c9dbf926ee50452c035"} err="failed to get container status \"909acaa1c966490476813a4ef0264373f6a888c0f6573c9dbf926ee50452c035\": rpc error: code = NotFound desc = could not find container \"909acaa1c966490476813a4ef0264373f6a888c0f6573c9dbf926ee50452c035\": container with ID starting with 909acaa1c966490476813a4ef0264373f6a888c0f6573c9dbf926ee50452c035 not found: ID does not exist" Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.721498 4946 scope.go:117] "RemoveContainer" containerID="d94208900bbdc1c8d570c0a10e43ed87eebb95c9ead871b6458e9105ab23c89c" Dec 04 16:02:36 crc kubenswrapper[4946]: E1204 16:02:36.722406 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d94208900bbdc1c8d570c0a10e43ed87eebb95c9ead871b6458e9105ab23c89c\": container with ID starting with d94208900bbdc1c8d570c0a10e43ed87eebb95c9ead871b6458e9105ab23c89c not found: ID does not exist" containerID="d94208900bbdc1c8d570c0a10e43ed87eebb95c9ead871b6458e9105ab23c89c" Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.722433 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d94208900bbdc1c8d570c0a10e43ed87eebb95c9ead871b6458e9105ab23c89c"} err="failed to get container status \"d94208900bbdc1c8d570c0a10e43ed87eebb95c9ead871b6458e9105ab23c89c\": rpc error: code = NotFound desc = could not find container \"d94208900bbdc1c8d570c0a10e43ed87eebb95c9ead871b6458e9105ab23c89c\": container with ID starting with 
d94208900bbdc1c8d570c0a10e43ed87eebb95c9ead871b6458e9105ab23c89c not found: ID does not exist"
Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.722455 4946 scope.go:117] "RemoveContainer" containerID="db1f8970c6afc19be71b9bdca4f96e4f9ee8791721ac7a14263bf49c76f89d5c"
Dec 04 16:02:36 crc kubenswrapper[4946]: E1204 16:02:36.727523 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db1f8970c6afc19be71b9bdca4f96e4f9ee8791721ac7a14263bf49c76f89d5c\": container with ID starting with db1f8970c6afc19be71b9bdca4f96e4f9ee8791721ac7a14263bf49c76f89d5c not found: ID does not exist" containerID="db1f8970c6afc19be71b9bdca4f96e4f9ee8791721ac7a14263bf49c76f89d5c"
Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.727554 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db1f8970c6afc19be71b9bdca4f96e4f9ee8791721ac7a14263bf49c76f89d5c"} err="failed to get container status \"db1f8970c6afc19be71b9bdca4f96e4f9ee8791721ac7a14263bf49c76f89d5c\": rpc error: code = NotFound desc = could not find container \"db1f8970c6afc19be71b9bdca4f96e4f9ee8791721ac7a14263bf49c76f89d5c\": container with ID starting with db1f8970c6afc19be71b9bdca4f96e4f9ee8791721ac7a14263bf49c76f89d5c not found: ID does not exist"
Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.727575 4946 scope.go:117] "RemoveContainer" containerID="98c0ff0a0c7c7f3f5b7e65416ce8639aa330d8a98a513ac6a4fa8b4e875da8a8"
Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.809410 4946 scope.go:117] "RemoveContainer" containerID="a0dd3d8e3d7d4c24c8f277b6a337930567595084def703e7d0404f8c51458b74"
Dec 04 16:02:36 crc kubenswrapper[4946]: I1204 16:02:36.841593 4946 scope.go:117] "RemoveContainer" containerID="2d27abf9a2ff6857637b759cce4e64c0a7bbb386e254847b1b11d1f4e44960c0"
Dec 04 16:02:37 crc kubenswrapper[4946]: I1204 16:02:37.471410 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2" path="/var/lib/kubelet/pods/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2/volumes"
Dec 04 16:02:37 crc kubenswrapper[4946]: I1204 16:02:37.473002 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c00f6e37-9ae9-4b26-8efe-237923f4dbae" path="/var/lib/kubelet/pods/c00f6e37-9ae9-4b26-8efe-237923f4dbae/volumes"
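
Once "Cleaned up orphaned pod volumes dir" is logged, the per-pod directory under /var/lib/kubelet/pods should be gone. A hypothetical spot-check on the node, with the path copied verbatim from the record above:

    sudo ls /var/lib/kubelet/pods/a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2/volumes
    # expected after cleanup: No such file or directory
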
Dec 04 16:02:52 crc kubenswrapper[4946]: E1204 16:02:52.859454 4946 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296"
Dec 04 16:02:52 crc kubenswrapper[4946]: E1204 16:02:52.860476 4946 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:container-00,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296,Command:[chroot /host bash -c echo 'TOOLBOX_NAME=toolbox-osp' > /root/.toolboxrc ; rm -rf \"/var/tmp/sos-osp\" && mkdir -p \"/var/tmp/sos-osp\" && sudo podman rm --force toolbox-osp; sudo --preserve-env podman pull --authfile /var/lib/kubelet/config.json registry.redhat.io/rhel9/support-tools && toolbox sos report --batch --all-logs --only-plugins block,cifs,crio,devicemapper,devices,firewall_tables,firewalld,iscsi,lvm2,memory,multipath,nfs,nis,nvme,podman,process,processor,selinux,scsi,udev,logs,crypto --tmp-dir=\"/var/tmp/sos-osp\" && if [[ \"$(ls /var/log/pods/*/{*.log.*,*/*.log.*} 2>/dev/null)\" != '' ]]; then tar --ignore-failed-read --warning=no-file-changed -cJf \"/var/tmp/sos-osp/podlogs.tar.xz\" --transform 's,^,podlogs/,' /var/log/pods/*/{*.log.*,*/*.log.*} || true; fi],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:TMOUT,Value:900,ValueFrom:nil,},EnvVar{Name:HOST,Value:/host,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:host,ReadOnly:false,MountPath:/host,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kk2tn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod crc-debug-kdrsb_openshift-must-gather-8qm9f(b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 04 16:02:52 crc kubenswrapper[4946]: E1204 16:02:52.863749 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"container-00\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openshift-must-gather-8qm9f/crc-debug-kdrsb" podUID="b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6"
Dec 04 16:02:53 crc kubenswrapper[4946]: E1204 16:02:53.801601 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"container-00\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296\\\"\"" pod="openshift-must-gather-8qm9f/crc-debug-kdrsb" podUID="b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6"
Dec 04 16:03:09 crc kubenswrapper[4946]: I1204 16:03:09.979886 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8qm9f/crc-debug-kdrsb" event={"ID":"b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6","Type":"ContainerStarted","Data":"039457f6257a78c742abaf5dd55e7a974f673504915be1c6618be8087d29a4b6"}
Dec 04 16:03:09 crc kubenswrapper[4946]: I1204 16:03:09.998685 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-8qm9f/crc-debug-kdrsb" podStartSLOduration=1.700399842 podStartE2EDuration="36.99866642s" podCreationTimestamp="2025-12-04 16:02:33 +0000 UTC" firstStartedPulling="2025-12-04 16:02:33.603441679 +0000 UTC m=+3604.489485320" lastFinishedPulling="2025-12-04 16:03:08.901708257 +0000 UTC m=+3639.787751898" observedRunningTime="2025-12-04 16:03:09.995033789 +0000 UTC m=+3640.881077430" watchObservedRunningTime="2025-12-04 16:03:09.99866642 +0000 UTC m=+3640.884710061"
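
The pull of this image was canceled mid-copy, the pod went into image-pull back-off, and the same pull then succeeded on retry at 16:03:09 (ContainerStarted above). While waiting out such a back-off, one way to rule out a registry or credential problem is to rerun the pull by hand on the node, reusing the kubelet's pull secrets the same way the crc-debug command embedded in the error does (a sketch; assumes a shell on the node):

    sudo podman pull --authfile /var/lib/kubelet/config.json \
        quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296
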
Dec 04 16:03:22 crc kubenswrapper[4946]: I1204 16:03:22.478648 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 16:03:22 crc kubenswrapper[4946]: I1204 16:03:22.479317 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 16:03:52 crc kubenswrapper[4946]: I1204 16:03:52.478584 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 16:03:52 crc kubenswrapper[4946]: I1204 16:03:52.479772 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
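
This liveness probe is a plain HTTP GET against a localhost health endpoint, so it can be reproduced from the node roughly as the kubelet issues it (illustrative; the URL is taken from the probe output above):

    curl -fsS http://127.0.0.1:8798/health
    # currently: connect: connection refused (nothing is listening on 8798)
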
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 16:04:05 crc kubenswrapper[4946]: I1204 16:04:05.882968 4946 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6-host\") on node \"crc\" DevicePath \"\"" Dec 04 16:04:05 crc kubenswrapper[4946]: I1204 16:04:05.892042 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6-kube-api-access-kk2tn" (OuterVolumeSpecName: "kube-api-access-kk2tn") pod "b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6" (UID: "b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6"). InnerVolumeSpecName "kube-api-access-kk2tn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 16:04:05 crc kubenswrapper[4946]: I1204 16:04:05.916043 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-8qm9f/crc-debug-kdrsb"] Dec 04 16:04:05 crc kubenswrapper[4946]: I1204 16:04:05.929276 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-8qm9f/crc-debug-kdrsb"] Dec 04 16:04:05 crc kubenswrapper[4946]: I1204 16:04:05.985781 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kk2tn\" (UniqueName: \"kubernetes.io/projected/b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6-kube-api-access-kk2tn\") on node \"crc\" DevicePath \"\"" Dec 04 16:04:06 crc kubenswrapper[4946]: I1204 16:04:06.739970 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="03f4070be78a71c00be2d312cb45b91ce0b2f9b996f8fe6404b1dff1ba82dab6" Dec 04 16:04:06 crc kubenswrapper[4946]: I1204 16:04:06.740087 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8qm9f/crc-debug-kdrsb" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.206035 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-8qm9f/crc-debug-ksrwh"] Dec 04 16:04:07 crc kubenswrapper[4946]: E1204 16:04:07.206665 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c00f6e37-9ae9-4b26-8efe-237923f4dbae" containerName="extract-utilities" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.206684 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c00f6e37-9ae9-4b26-8efe-237923f4dbae" containerName="extract-utilities" Dec 04 16:04:07 crc kubenswrapper[4946]: E1204 16:04:07.206699 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2" containerName="extract-content" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.206706 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2" containerName="extract-content" Dec 04 16:04:07 crc kubenswrapper[4946]: E1204 16:04:07.206738 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2" containerName="extract-utilities" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.206745 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2" containerName="extract-utilities" Dec 04 16:04:07 crc kubenswrapper[4946]: E1204 16:04:07.206759 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6" containerName="container-00" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.206766 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6" 
containerName="container-00" Dec 04 16:04:07 crc kubenswrapper[4946]: E1204 16:04:07.206790 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c00f6e37-9ae9-4b26-8efe-237923f4dbae" containerName="extract-content" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.206796 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c00f6e37-9ae9-4b26-8efe-237923f4dbae" containerName="extract-content" Dec 04 16:04:07 crc kubenswrapper[4946]: E1204 16:04:07.206807 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c00f6e37-9ae9-4b26-8efe-237923f4dbae" containerName="registry-server" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.206814 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c00f6e37-9ae9-4b26-8efe-237923f4dbae" containerName="registry-server" Dec 04 16:04:07 crc kubenswrapper[4946]: E1204 16:04:07.206827 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2" containerName="registry-server" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.206833 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2" containerName="registry-server" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.207062 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5a3c9c9-c514-458f-8ae0-c955ee7f2dd2" containerName="registry-server" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.207085 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="c00f6e37-9ae9-4b26-8efe-237923f4dbae" containerName="registry-server" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.207103 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6" containerName="container-00" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.208142 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8qm9f/crc-debug-ksrwh" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.317535 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/625423c5-3034-40e5-85eb-706f51883960-host\") pod \"crc-debug-ksrwh\" (UID: \"625423c5-3034-40e5-85eb-706f51883960\") " pod="openshift-must-gather-8qm9f/crc-debug-ksrwh" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.317618 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5m5h\" (UniqueName: \"kubernetes.io/projected/625423c5-3034-40e5-85eb-706f51883960-kube-api-access-f5m5h\") pod \"crc-debug-ksrwh\" (UID: \"625423c5-3034-40e5-85eb-706f51883960\") " pod="openshift-must-gather-8qm9f/crc-debug-ksrwh" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.421020 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/625423c5-3034-40e5-85eb-706f51883960-host\") pod \"crc-debug-ksrwh\" (UID: \"625423c5-3034-40e5-85eb-706f51883960\") " pod="openshift-must-gather-8qm9f/crc-debug-ksrwh" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.421098 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5m5h\" (UniqueName: \"kubernetes.io/projected/625423c5-3034-40e5-85eb-706f51883960-kube-api-access-f5m5h\") pod \"crc-debug-ksrwh\" (UID: \"625423c5-3034-40e5-85eb-706f51883960\") " pod="openshift-must-gather-8qm9f/crc-debug-ksrwh" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.421205 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/625423c5-3034-40e5-85eb-706f51883960-host\") pod \"crc-debug-ksrwh\" (UID: \"625423c5-3034-40e5-85eb-706f51883960\") " pod="openshift-must-gather-8qm9f/crc-debug-ksrwh" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.482961 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5m5h\" (UniqueName: \"kubernetes.io/projected/625423c5-3034-40e5-85eb-706f51883960-kube-api-access-f5m5h\") pod \"crc-debug-ksrwh\" (UID: \"625423c5-3034-40e5-85eb-706f51883960\") " pod="openshift-must-gather-8qm9f/crc-debug-ksrwh" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.493350 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6" path="/var/lib/kubelet/pods/b20312d5-29f8-4c19-a6dd-13d9ef5ffbe6/volumes" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.536071 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8qm9f/crc-debug-ksrwh" Dec 04 16:04:07 crc kubenswrapper[4946]: I1204 16:04:07.772334 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8qm9f/crc-debug-ksrwh" event={"ID":"625423c5-3034-40e5-85eb-706f51883960","Type":"ContainerStarted","Data":"deca636e7a186ba6cd7d4ad300fcb6fb813d4de5eeb29854a96a5c877dccfcf6"} Dec 04 16:04:08 crc kubenswrapper[4946]: I1204 16:04:08.785421 4946 generic.go:334] "Generic (PLEG): container finished" podID="625423c5-3034-40e5-85eb-706f51883960" containerID="b7f34d1fa2c8fbf441461e9d43e03142e71bf37399f128e197b1b4cea80c0960" exitCode=0 Dec 04 16:04:08 crc kubenswrapper[4946]: I1204 16:04:08.785540 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8qm9f/crc-debug-ksrwh" event={"ID":"625423c5-3034-40e5-85eb-706f51883960","Type":"ContainerDied","Data":"b7f34d1fa2c8fbf441461e9d43e03142e71bf37399f128e197b1b4cea80c0960"} Dec 04 16:04:09 crc kubenswrapper[4946]: I1204 16:04:09.918587 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8qm9f/crc-debug-ksrwh" Dec 04 16:04:10 crc kubenswrapper[4946]: I1204 16:04:10.001810 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5m5h\" (UniqueName: \"kubernetes.io/projected/625423c5-3034-40e5-85eb-706f51883960-kube-api-access-f5m5h\") pod \"625423c5-3034-40e5-85eb-706f51883960\" (UID: \"625423c5-3034-40e5-85eb-706f51883960\") " Dec 04 16:04:10 crc kubenswrapper[4946]: I1204 16:04:10.002341 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/625423c5-3034-40e5-85eb-706f51883960-host\") pod \"625423c5-3034-40e5-85eb-706f51883960\" (UID: \"625423c5-3034-40e5-85eb-706f51883960\") " Dec 04 16:04:10 crc kubenswrapper[4946]: I1204 16:04:10.002458 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/625423c5-3034-40e5-85eb-706f51883960-host" (OuterVolumeSpecName: "host") pod "625423c5-3034-40e5-85eb-706f51883960" (UID: "625423c5-3034-40e5-85eb-706f51883960"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 16:04:10 crc kubenswrapper[4946]: I1204 16:04:10.003083 4946 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/625423c5-3034-40e5-85eb-706f51883960-host\") on node \"crc\" DevicePath \"\"" Dec 04 16:04:10 crc kubenswrapper[4946]: I1204 16:04:10.020040 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/625423c5-3034-40e5-85eb-706f51883960-kube-api-access-f5m5h" (OuterVolumeSpecName: "kube-api-access-f5m5h") pod "625423c5-3034-40e5-85eb-706f51883960" (UID: "625423c5-3034-40e5-85eb-706f51883960"). InnerVolumeSpecName "kube-api-access-f5m5h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 16:04:10 crc kubenswrapper[4946]: I1204 16:04:10.047564 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-8qm9f/crc-debug-ksrwh"] Dec 04 16:04:10 crc kubenswrapper[4946]: I1204 16:04:10.057851 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-8qm9f/crc-debug-ksrwh"] Dec 04 16:04:10 crc kubenswrapper[4946]: I1204 16:04:10.104894 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5m5h\" (UniqueName: \"kubernetes.io/projected/625423c5-3034-40e5-85eb-706f51883960-kube-api-access-f5m5h\") on node \"crc\" DevicePath \"\"" Dec 04 16:04:10 crc kubenswrapper[4946]: I1204 16:04:10.818477 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="deca636e7a186ba6cd7d4ad300fcb6fb813d4de5eeb29854a96a5c877dccfcf6" Dec 04 16:04:10 crc kubenswrapper[4946]: I1204 16:04:10.818603 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8qm9f/crc-debug-ksrwh" Dec 04 16:04:11 crc kubenswrapper[4946]: I1204 16:04:11.243298 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-8qm9f/crc-debug-p8qps"] Dec 04 16:04:11 crc kubenswrapper[4946]: E1204 16:04:11.243974 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="625423c5-3034-40e5-85eb-706f51883960" containerName="container-00" Dec 04 16:04:11 crc kubenswrapper[4946]: I1204 16:04:11.244012 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="625423c5-3034-40e5-85eb-706f51883960" containerName="container-00" Dec 04 16:04:11 crc kubenswrapper[4946]: I1204 16:04:11.244307 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="625423c5-3034-40e5-85eb-706f51883960" containerName="container-00" Dec 04 16:04:11 crc kubenswrapper[4946]: I1204 16:04:11.245713 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8qm9f/crc-debug-p8qps" Dec 04 16:04:11 crc kubenswrapper[4946]: I1204 16:04:11.336424 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdhc7\" (UniqueName: \"kubernetes.io/projected/798180d6-b704-43bf-b80e-bfee9add7a03-kube-api-access-fdhc7\") pod \"crc-debug-p8qps\" (UID: \"798180d6-b704-43bf-b80e-bfee9add7a03\") " pod="openshift-must-gather-8qm9f/crc-debug-p8qps" Dec 04 16:04:11 crc kubenswrapper[4946]: I1204 16:04:11.337030 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/798180d6-b704-43bf-b80e-bfee9add7a03-host\") pod \"crc-debug-p8qps\" (UID: \"798180d6-b704-43bf-b80e-bfee9add7a03\") " pod="openshift-must-gather-8qm9f/crc-debug-p8qps" Dec 04 16:04:11 crc kubenswrapper[4946]: I1204 16:04:11.439561 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/798180d6-b704-43bf-b80e-bfee9add7a03-host\") pod \"crc-debug-p8qps\" (UID: \"798180d6-b704-43bf-b80e-bfee9add7a03\") " pod="openshift-must-gather-8qm9f/crc-debug-p8qps" Dec 04 16:04:11 crc kubenswrapper[4946]: I1204 16:04:11.439680 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdhc7\" (UniqueName: \"kubernetes.io/projected/798180d6-b704-43bf-b80e-bfee9add7a03-kube-api-access-fdhc7\") pod \"crc-debug-p8qps\" (UID: \"798180d6-b704-43bf-b80e-bfee9add7a03\") " pod="openshift-must-gather-8qm9f/crc-debug-p8qps" Dec 04 16:04:11 crc kubenswrapper[4946]: I1204 16:04:11.440181 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/798180d6-b704-43bf-b80e-bfee9add7a03-host\") pod \"crc-debug-p8qps\" (UID: \"798180d6-b704-43bf-b80e-bfee9add7a03\") " pod="openshift-must-gather-8qm9f/crc-debug-p8qps" Dec 04 16:04:11 crc kubenswrapper[4946]: I1204 16:04:11.462487 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdhc7\" (UniqueName: \"kubernetes.io/projected/798180d6-b704-43bf-b80e-bfee9add7a03-kube-api-access-fdhc7\") pod \"crc-debug-p8qps\" (UID: \"798180d6-b704-43bf-b80e-bfee9add7a03\") " pod="openshift-must-gather-8qm9f/crc-debug-p8qps" Dec 04 16:04:11 crc kubenswrapper[4946]: I1204 16:04:11.502414 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="625423c5-3034-40e5-85eb-706f51883960" path="/var/lib/kubelet/pods/625423c5-3034-40e5-85eb-706f51883960/volumes" Dec 04 16:04:11 crc kubenswrapper[4946]: I1204 16:04:11.566054 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8qm9f/crc-debug-p8qps" Dec 04 16:04:11 crc kubenswrapper[4946]: I1204 16:04:11.832205 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8qm9f/crc-debug-p8qps" event={"ID":"798180d6-b704-43bf-b80e-bfee9add7a03","Type":"ContainerStarted","Data":"bab6a8d8d2a34d282ab3294a0b8d5ed898228e27a750992f454ebdd5fc6a8ec1"} Dec 04 16:04:12 crc kubenswrapper[4946]: I1204 16:04:12.846654 4946 generic.go:334] "Generic (PLEG): container finished" podID="798180d6-b704-43bf-b80e-bfee9add7a03" containerID="3655a70d0d40d3059cf66af2ba82baf9b8dda21fe342686c6eab48270da59580" exitCode=0 Dec 04 16:04:12 crc kubenswrapper[4946]: I1204 16:04:12.846784 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8qm9f/crc-debug-p8qps" event={"ID":"798180d6-b704-43bf-b80e-bfee9add7a03","Type":"ContainerDied","Data":"3655a70d0d40d3059cf66af2ba82baf9b8dda21fe342686c6eab48270da59580"} Dec 04 16:04:12 crc kubenswrapper[4946]: I1204 16:04:12.898065 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-8qm9f/crc-debug-p8qps"] Dec 04 16:04:12 crc kubenswrapper[4946]: I1204 16:04:12.909393 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-8qm9f/crc-debug-p8qps"] Dec 04 16:04:14 crc kubenswrapper[4946]: I1204 16:04:14.019829 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8qm9f/crc-debug-p8qps" Dec 04 16:04:14 crc kubenswrapper[4946]: I1204 16:04:14.117237 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/798180d6-b704-43bf-b80e-bfee9add7a03-host\") pod \"798180d6-b704-43bf-b80e-bfee9add7a03\" (UID: \"798180d6-b704-43bf-b80e-bfee9add7a03\") " Dec 04 16:04:14 crc kubenswrapper[4946]: I1204 16:04:14.117413 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/798180d6-b704-43bf-b80e-bfee9add7a03-host" (OuterVolumeSpecName: "host") pod "798180d6-b704-43bf-b80e-bfee9add7a03" (UID: "798180d6-b704-43bf-b80e-bfee9add7a03"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 16:04:14 crc kubenswrapper[4946]: I1204 16:04:14.117503 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fdhc7\" (UniqueName: \"kubernetes.io/projected/798180d6-b704-43bf-b80e-bfee9add7a03-kube-api-access-fdhc7\") pod \"798180d6-b704-43bf-b80e-bfee9add7a03\" (UID: \"798180d6-b704-43bf-b80e-bfee9add7a03\") " Dec 04 16:04:14 crc kubenswrapper[4946]: I1204 16:04:14.118920 4946 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/798180d6-b704-43bf-b80e-bfee9add7a03-host\") on node \"crc\" DevicePath \"\"" Dec 04 16:04:14 crc kubenswrapper[4946]: I1204 16:04:14.135423 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/798180d6-b704-43bf-b80e-bfee9add7a03-kube-api-access-fdhc7" (OuterVolumeSpecName: "kube-api-access-fdhc7") pod "798180d6-b704-43bf-b80e-bfee9add7a03" (UID: "798180d6-b704-43bf-b80e-bfee9add7a03"). InnerVolumeSpecName "kube-api-access-fdhc7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 16:04:14 crc kubenswrapper[4946]: I1204 16:04:14.221987 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fdhc7\" (UniqueName: \"kubernetes.io/projected/798180d6-b704-43bf-b80e-bfee9add7a03-kube-api-access-fdhc7\") on node \"crc\" DevicePath \"\"" Dec 04 16:04:14 crc kubenswrapper[4946]: I1204 16:04:14.870143 4946 scope.go:117] "RemoveContainer" containerID="3655a70d0d40d3059cf66af2ba82baf9b8dda21fe342686c6eab48270da59580" Dec 04 16:04:14 crc kubenswrapper[4946]: I1204 16:04:14.870338 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8qm9f/crc-debug-p8qps" Dec 04 16:04:15 crc kubenswrapper[4946]: I1204 16:04:15.466207 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="798180d6-b704-43bf-b80e-bfee9add7a03" path="/var/lib/kubelet/pods/798180d6-b704-43bf-b80e-bfee9add7a03/volumes" Dec 04 16:04:22 crc kubenswrapper[4946]: I1204 16:04:22.479276 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 16:04:22 crc kubenswrapper[4946]: I1204 16:04:22.480167 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 16:04:22 crc kubenswrapper[4946]: I1204 16:04:22.480224 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 16:04:22 crc kubenswrapper[4946]: I1204 16:04:22.481862 4946 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0"} pod="openshift-machine-config-operator/machine-config-daemon-qhv79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 16:04:22 crc kubenswrapper[4946]: I1204 16:04:22.481941 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" containerID="cri-o://4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" gracePeriod=600 Dec 04 16:04:22 crc kubenswrapper[4946]: E1204 16:04:22.707074 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:04:22 crc kubenswrapper[4946]: I1204 16:04:22.966437 4946 generic.go:334] "Generic (PLEG): container finished" podID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" exitCode=0 Dec 04 16:04:22 crc kubenswrapper[4946]: I1204 16:04:22.966514 
4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerDied","Data":"4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0"} Dec 04 16:04:22 crc kubenswrapper[4946]: I1204 16:04:22.966980 4946 scope.go:117] "RemoveContainer" containerID="d0a1437bd15130925a740c259ecdc4342e886fbcfecf8430fa402c5d9360e53c" Dec 04 16:04:22 crc kubenswrapper[4946]: I1204 16:04:22.967558 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:04:22 crc kubenswrapper[4946]: E1204 16:04:22.967875 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:04:37 crc kubenswrapper[4946]: I1204 16:04:37.453262 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:04:37 crc kubenswrapper[4946]: E1204 16:04:37.454297 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:04:42 crc kubenswrapper[4946]: I1204 16:04:42.616806 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_a304ef91-9673-43d6-8b91-0ba511961217/init-config-reloader/0.log" Dec 04 16:04:42 crc kubenswrapper[4946]: I1204 16:04:42.801014 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_a304ef91-9673-43d6-8b91-0ba511961217/init-config-reloader/0.log" Dec 04 16:04:42 crc kubenswrapper[4946]: I1204 16:04:42.892075 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_a304ef91-9673-43d6-8b91-0ba511961217/alertmanager/0.log" Dec 04 16:04:42 crc kubenswrapper[4946]: I1204 16:04:42.912711 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_a304ef91-9673-43d6-8b91-0ba511961217/config-reloader/0.log" Dec 04 16:04:43 crc kubenswrapper[4946]: I1204 16:04:43.035774 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-d6b8cfb46-xzwxx_6a807e28-4c6a-435c-b640-a11ae6770632/barbican-api/0.log" Dec 04 16:04:43 crc kubenswrapper[4946]: I1204 16:04:43.100634 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-d6b8cfb46-xzwxx_6a807e28-4c6a-435c-b640-a11ae6770632/barbican-api-log/0.log" Dec 04 16:04:43 crc kubenswrapper[4946]: I1204 16:04:43.198249 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5dbfff5fc8-dg589_71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2/barbican-keystone-listener/0.log" Dec 04 16:04:43 crc kubenswrapper[4946]: I1204 16:04:43.412054 4946 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-worker-5d869cc959-j4wsw_47433338-b9cd-4b5d-beaf-e551ca335c0e/barbican-worker/0.log" Dec 04 16:04:43 crc kubenswrapper[4946]: I1204 16:04:43.437481 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5dbfff5fc8-dg589_71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2/barbican-keystone-listener-log/0.log" Dec 04 16:04:43 crc kubenswrapper[4946]: I1204 16:04:43.494861 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5d869cc959-j4wsw_47433338-b9cd-4b5d-beaf-e551ca335c0e/barbican-worker-log/0.log" Dec 04 16:04:43 crc kubenswrapper[4946]: I1204 16:04:43.690804 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c_59863a34-23ab-44bb-be9a-dae51f8dd6c1/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 16:04:43 crc kubenswrapper[4946]: I1204 16:04:43.943552 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_655a0ab4-533d-4447-8656-72742f94f4a7/ceilometer-central-agent/0.log" Dec 04 16:04:44 crc kubenswrapper[4946]: I1204 16:04:44.075442 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_655a0ab4-533d-4447-8656-72742f94f4a7/proxy-httpd/0.log" Dec 04 16:04:44 crc kubenswrapper[4946]: I1204 16:04:44.104203 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_655a0ab4-533d-4447-8656-72742f94f4a7/ceilometer-notification-agent/0.log" Dec 04 16:04:44 crc kubenswrapper[4946]: I1204 16:04:44.136685 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_655a0ab4-533d-4447-8656-72742f94f4a7/sg-core/0.log" Dec 04 16:04:44 crc kubenswrapper[4946]: I1204 16:04:44.311780 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0203bca7-1453-4a7b-8597-5286d1d245b2/cinder-api-log/0.log" Dec 04 16:04:44 crc kubenswrapper[4946]: I1204 16:04:44.366990 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0203bca7-1453-4a7b-8597-5286d1d245b2/cinder-api/0.log" Dec 04 16:04:44 crc kubenswrapper[4946]: I1204 16:04:44.507986 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_fb133f81-1fe2-4e36-8663-8301e9373627/cinder-scheduler/0.log" Dec 04 16:04:44 crc kubenswrapper[4946]: I1204 16:04:44.590821 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_fb133f81-1fe2-4e36-8663-8301e9373627/probe/0.log" Dec 04 16:04:44 crc kubenswrapper[4946]: I1204 16:04:44.788385 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_602d77a3-3d2b-488d-ac47-74d9fd037d6c/cloudkitty-api/0.log" Dec 04 16:04:44 crc kubenswrapper[4946]: I1204 16:04:44.803720 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_602d77a3-3d2b-488d-ac47-74d9fd037d6c/cloudkitty-api-log/0.log" Dec 04 16:04:44 crc kubenswrapper[4946]: I1204 16:04:44.976086 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-compactor-0_47583dfd-ecd6-41d8-ac98-748683cd0ae5/loki-compactor/0.log" Dec 04 16:04:45 crc kubenswrapper[4946]: I1204 16:04:45.235145 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-distributor-664b687b54-76w8c_b0adb62a-e125-4612-8e57-74bab154a2c4/loki-distributor/0.log" Dec 04 16:04:45 crc kubenswrapper[4946]: I1204 16:04:45.283558 4946 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-bc75944f-c6jcc_c72f3e47-f551-4d7e-8978-cf453bc9a80d/gateway/0.log" Dec 04 16:04:45 crc kubenswrapper[4946]: I1204 16:04:45.535593 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-bc75944f-jtbm6_2ffa4fa2-c466-47f5-bca6-613ec9e52779/gateway/0.log" Dec 04 16:04:45 crc kubenswrapper[4946]: I1204 16:04:45.606885 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-index-gateway-0_b21846fc-0f45-4cae-aea6-b4e3f33ec03a/loki-index-gateway/0.log" Dec 04 16:04:46 crc kubenswrapper[4946]: I1204 16:04:46.103330 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn_55a6559d-165f-4fb0-ac08-a0ba07d02cac/loki-query-frontend/0.log" Dec 04 16:04:46 crc kubenswrapper[4946]: I1204 16:04:46.120004 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-ingester-0_decd9bb2-7749-48ff-b886-74e49bf5222d/loki-ingester/0.log" Dec 04 16:04:46 crc kubenswrapper[4946]: I1204 16:04:46.806800 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm_d2eb924b-02a3-41e8-b820-0a89c1420ebc/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 16:04:46 crc kubenswrapper[4946]: I1204 16:04:46.883151 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb_3c768922-7c81-4021-ab76-fd151946e8fa/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 16:04:47 crc kubenswrapper[4946]: I1204 16:04:47.001976 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-querier-5467947bf7-wwslq_02db9740-8e77-440b-95f9-6a2968cd39fe/loki-querier/0.log" Dec 04 16:04:47 crc kubenswrapper[4946]: I1204 16:04:47.088501 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-vglk4_d984a81d-2489-42fa-b527-8962119b7dc5/init/0.log" Dec 04 16:04:47 crc kubenswrapper[4946]: I1204 16:04:47.487817 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-vglk4_d984a81d-2489-42fa-b527-8962119b7dc5/init/0.log" Dec 04 16:04:47 crc kubenswrapper[4946]: I1204 16:04:47.558402 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-vglk4_d984a81d-2489-42fa-b527-8962119b7dc5/dnsmasq-dns/0.log" Dec 04 16:04:47 crc kubenswrapper[4946]: I1204 16:04:47.709139 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-h6x67_707e8d7d-0e5d-4e4c-ab78-9a4745449b8c/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 16:04:47 crc kubenswrapper[4946]: I1204 16:04:47.865330 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_57cbb428-8955-4aa2-9025-cfdd74592074/glance-httpd/0.log" Dec 04 16:04:47 crc kubenswrapper[4946]: I1204 16:04:47.989394 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_57cbb428-8955-4aa2-9025-cfdd74592074/glance-log/0.log" Dec 04 16:04:48 crc kubenswrapper[4946]: I1204 16:04:48.163407 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_0b2521f9-40c6-4e13-a510-68d5dc34b313/glance-log/0.log" Dec 04 
16:04:48 crc kubenswrapper[4946]: I1204 16:04:48.180256 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_0b2521f9-40c6-4e13-a510-68d5dc34b313/glance-httpd/0.log" Dec 04 16:04:48 crc kubenswrapper[4946]: I1204 16:04:48.400262 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-bn587_f35809fc-31b6-4c6b-a652-928ed15e187e/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 16:04:48 crc kubenswrapper[4946]: I1204 16:04:48.807650 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-hw8x7_ab9c79b0-c651-4fdb-aa44-76b66239ef80/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 16:04:49 crc kubenswrapper[4946]: I1204 16:04:49.235507 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29414401-bnsr9_ea23c958-243b-479d-a5f3-83e729f96b17/keystone-cron/0.log" Dec 04 16:04:49 crc kubenswrapper[4946]: I1204 16:04:49.543607 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_7bb3c93e-3400-4b38-bc6d-733a1d345435/kube-state-metrics/0.log" Dec 04 16:04:49 crc kubenswrapper[4946]: I1204 16:04:49.577130 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-b7bc594d8-sjpg5_279e516e-61bc-4d5b-a3f9-34ecc6c5f47b/keystone-api/0.log" Dec 04 16:04:49 crc kubenswrapper[4946]: I1204 16:04:49.777971 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w_b0812311-5552-4d94-aa72-d7274447e1f6/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 16:04:50 crc kubenswrapper[4946]: I1204 16:04:50.369690 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-9c7bc6557-kqv86_5ff5d11b-6f56-4794-97a4-172ef873766c/neutron-api/0.log" Dec 04 16:04:50 crc kubenswrapper[4946]: I1204 16:04:50.396991 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-9c7bc6557-kqv86_5ff5d11b-6f56-4794-97a4-172ef873766c/neutron-httpd/0.log" Dec 04 16:04:50 crc kubenswrapper[4946]: I1204 16:04:50.413341 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-proc-0_25621d99-0fe9-42fe-a800-08160c4740aa/cloudkitty-proc/0.log" Dec 04 16:04:50 crc kubenswrapper[4946]: I1204 16:04:50.592995 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf_4370c15e-59ff-447e-a825-c687fde1efe0/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 16:04:50 crc kubenswrapper[4946]: I1204 16:04:50.987690 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_580cdc0a-af87-4eac-8b8e-79d451eb312c/nova-api-log/0.log" Dec 04 16:04:51 crc kubenswrapper[4946]: I1204 16:04:51.160882 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_75446cac-ffe3-4e3a-9bde-e8372b8318c3/nova-cell0-conductor-conductor/0.log" Dec 04 16:04:51 crc kubenswrapper[4946]: I1204 16:04:51.258630 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_580cdc0a-af87-4eac-8b8e-79d451eb312c/nova-api-api/0.log" Dec 04 16:04:51 crc kubenswrapper[4946]: I1204 16:04:51.380639 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_4664ae52-b2f1-43d1-a79f-75ccb8fc3a07/nova-cell1-conductor-conductor/0.log" Dec 04 
16:04:51 crc kubenswrapper[4946]: I1204 16:04:51.506228 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_2f2624be-b71d-475e-a895-515905f6ef24/nova-cell1-novncproxy-novncproxy/0.log" Dec 04 16:04:51 crc kubenswrapper[4946]: I1204 16:04:51.716080 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-xh4n5_16f11a61-301b-45bc-9ef4-675b164d4ace/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 16:04:51 crc kubenswrapper[4946]: I1204 16:04:51.996671 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_9c624054-0759-47af-af3e-4600907ab8b8/nova-metadata-log/0.log" Dec 04 16:04:52 crc kubenswrapper[4946]: I1204 16:04:52.300385 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_29ba8816-fd2e-4a8d-bbcf-d2178110c7eb/nova-scheduler-scheduler/0.log" Dec 04 16:04:52 crc kubenswrapper[4946]: I1204 16:04:52.357101 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_44a85e36-b029-4450-b8aa-11bf910d8139/mysql-bootstrap/0.log" Dec 04 16:04:52 crc kubenswrapper[4946]: I1204 16:04:52.453148 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:04:52 crc kubenswrapper[4946]: E1204 16:04:52.453511 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:04:52 crc kubenswrapper[4946]: I1204 16:04:52.698463 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_44a85e36-b029-4450-b8aa-11bf910d8139/galera/0.log" Dec 04 16:04:52 crc kubenswrapper[4946]: I1204 16:04:52.775566 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_44a85e36-b029-4450-b8aa-11bf910d8139/mysql-bootstrap/0.log" Dec 04 16:04:52 crc kubenswrapper[4946]: I1204 16:04:52.975452 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_43d26c42-eba9-4e5c-bd2d-7cdf7074a176/mysql-bootstrap/0.log" Dec 04 16:04:53 crc kubenswrapper[4946]: I1204 16:04:53.271161 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_43d26c42-eba9-4e5c-bd2d-7cdf7074a176/galera/0.log" Dec 04 16:04:53 crc kubenswrapper[4946]: I1204 16:04:53.327251 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_43d26c42-eba9-4e5c-bd2d-7cdf7074a176/mysql-bootstrap/0.log" Dec 04 16:04:53 crc kubenswrapper[4946]: I1204 16:04:53.386674 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_9c624054-0759-47af-af3e-4600907ab8b8/nova-metadata-metadata/0.log" Dec 04 16:04:53 crc kubenswrapper[4946]: I1204 16:04:53.480196 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_b5c0e428-98ad-4bda-aba1-685f1b5c8009/openstackclient/0.log" Dec 04 16:04:53 crc kubenswrapper[4946]: I1204 16:04:53.679602 4946 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-controller-metrics-nx7vc_3a94df46-46e0-4178-804b-1582e9cf7738/openstack-network-exporter/0.log" Dec 04 16:04:53 crc kubenswrapper[4946]: I1204 16:04:53.789970 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hc6tt_9c369924-f384-4ca1-b3ac-e1b334790f15/ovsdb-server-init/0.log" Dec 04 16:04:54 crc kubenswrapper[4946]: I1204 16:04:54.118254 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hc6tt_9c369924-f384-4ca1-b3ac-e1b334790f15/ovsdb-server/0.log" Dec 04 16:04:54 crc kubenswrapper[4946]: I1204 16:04:54.156450 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hc6tt_9c369924-f384-4ca1-b3ac-e1b334790f15/ovs-vswitchd/0.log" Dec 04 16:04:54 crc kubenswrapper[4946]: I1204 16:04:54.181768 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hc6tt_9c369924-f384-4ca1-b3ac-e1b334790f15/ovsdb-server-init/0.log" Dec 04 16:04:54 crc kubenswrapper[4946]: I1204 16:04:54.423435 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-qv4hw_2734e466-178a-4344-bfac-9adb5e4492a7/ovn-controller/0.log" Dec 04 16:04:54 crc kubenswrapper[4946]: I1204 16:04:54.501991 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-8b4ms_c761f173-f866-4098-adc7-426857a5004c/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 16:04:54 crc kubenswrapper[4946]: I1204 16:04:54.641209 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7/openstack-network-exporter/0.log" Dec 04 16:04:54 crc kubenswrapper[4946]: I1204 16:04:54.758680 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7/ovn-northd/0.log" Dec 04 16:04:54 crc kubenswrapper[4946]: I1204 16:04:54.985154 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_c6906a68-0819-41bc-a3d8-2ac76e77b67f/openstack-network-exporter/0.log" Dec 04 16:04:54 crc kubenswrapper[4946]: I1204 16:04:54.988054 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_c6906a68-0819-41bc-a3d8-2ac76e77b67f/ovsdbserver-nb/0.log" Dec 04 16:04:55 crc kubenswrapper[4946]: I1204 16:04:55.207950 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_f89f1623-6a48-4db4-8059-940887046c8e/openstack-network-exporter/0.log" Dec 04 16:04:55 crc kubenswrapper[4946]: I1204 16:04:55.293417 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_f89f1623-6a48-4db4-8059-940887046c8e/ovsdbserver-sb/0.log" Dec 04 16:04:55 crc kubenswrapper[4946]: I1204 16:04:55.485634 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6c965d6d44-d6246_8177c201-11cb-42af-8a3f-85944e6558a3/placement-api/0.log" Dec 04 16:04:55 crc kubenswrapper[4946]: I1204 16:04:55.602408 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6c965d6d44-d6246_8177c201-11cb-42af-8a3f-85944e6558a3/placement-log/0.log" Dec 04 16:04:55 crc kubenswrapper[4946]: I1204 16:04:55.642040 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_ce1f1c5e-70ed-463e-88d7-a0a960dd328d/init-config-reloader/0.log" Dec 04 16:04:55 crc kubenswrapper[4946]: I1204 16:04:55.984759 4946 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_ce1f1c5e-70ed-463e-88d7-a0a960dd328d/config-reloader/0.log" Dec 04 16:04:56 crc kubenswrapper[4946]: I1204 16:04:56.006316 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_ce1f1c5e-70ed-463e-88d7-a0a960dd328d/init-config-reloader/0.log" Dec 04 16:04:56 crc kubenswrapper[4946]: I1204 16:04:56.012744 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_ce1f1c5e-70ed-463e-88d7-a0a960dd328d/thanos-sidecar/0.log" Dec 04 16:04:56 crc kubenswrapper[4946]: I1204 16:04:56.017388 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_ce1f1c5e-70ed-463e-88d7-a0a960dd328d/prometheus/0.log" Dec 04 16:04:56 crc kubenswrapper[4946]: I1204 16:04:56.360547 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6458626f-136f-475a-b7ad-cf32977e39eb/setup-container/0.log" Dec 04 16:04:56 crc kubenswrapper[4946]: I1204 16:04:56.526330 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6458626f-136f-475a-b7ad-cf32977e39eb/setup-container/0.log" Dec 04 16:04:56 crc kubenswrapper[4946]: I1204 16:04:56.579764 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6458626f-136f-475a-b7ad-cf32977e39eb/rabbitmq/0.log" Dec 04 16:04:56 crc kubenswrapper[4946]: I1204 16:04:56.693413 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_f75f35c6-b58d-471d-9b5e-2d402f3ce92f/setup-container/0.log" Dec 04 16:04:57 crc kubenswrapper[4946]: I1204 16:04:57.000177 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_f75f35c6-b58d-471d-9b5e-2d402f3ce92f/setup-container/0.log" Dec 04 16:04:57 crc kubenswrapper[4946]: I1204 16:04:57.014844 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_f75f35c6-b58d-471d-9b5e-2d402f3ce92f/rabbitmq/0.log" Dec 04 16:04:57 crc kubenswrapper[4946]: I1204 16:04:57.139583 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc_c93b77be-2594-456e-a0fc-0a73d3bc6a0b/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 16:04:57 crc kubenswrapper[4946]: I1204 16:04:57.289350 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-h9gzd_34194ffb-2211-4d3b-820e-87e8008211a8/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 16:04:57 crc kubenswrapper[4946]: I1204 16:04:57.475819 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b_cc2c7406-87e9-4da5-b99c-845bddf4a05b/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 16:04:57 crc kubenswrapper[4946]: I1204 16:04:57.669533 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-dztmc_e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 16:04:57 crc kubenswrapper[4946]: I1204 16:04:57.824217 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-vxxvc_55c4138e-0212-42f5-a45c-52eead1474d3/ssh-known-hosts-edpm-deployment/0.log" Dec 04 16:04:58 crc kubenswrapper[4946]: I1204 16:04:58.003144 4946 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-8679d7877f-2wbn9_bec308a1-7b44-4153-a863-7b9755407899/proxy-server/0.log" Dec 04 16:04:58 crc kubenswrapper[4946]: I1204 16:04:58.249373 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-rnp6k_f2266dde-4870-46a5-9c4a-c348c6c4d4ed/swift-ring-rebalance/0.log" Dec 04 16:04:58 crc kubenswrapper[4946]: I1204 16:04:58.250140 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-8679d7877f-2wbn9_bec308a1-7b44-4153-a863-7b9755407899/proxy-httpd/0.log" Dec 04 16:04:58 crc kubenswrapper[4946]: I1204 16:04:58.514277 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/account-reaper/0.log" Dec 04 16:04:58 crc kubenswrapper[4946]: I1204 16:04:58.521677 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/account-auditor/0.log" Dec 04 16:04:58 crc kubenswrapper[4946]: I1204 16:04:58.613556 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/account-replicator/0.log" Dec 04 16:04:58 crc kubenswrapper[4946]: I1204 16:04:58.710904 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/account-server/0.log" Dec 04 16:04:58 crc kubenswrapper[4946]: I1204 16:04:58.766010 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/container-auditor/0.log" Dec 04 16:04:58 crc kubenswrapper[4946]: I1204 16:04:58.859218 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/container-replicator/0.log" Dec 04 16:04:58 crc kubenswrapper[4946]: I1204 16:04:58.971108 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/container-server/0.log" Dec 04 16:04:59 crc kubenswrapper[4946]: I1204 16:04:59.082859 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/container-updater/0.log" Dec 04 16:04:59 crc kubenswrapper[4946]: I1204 16:04:59.098240 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/object-auditor/0.log" Dec 04 16:04:59 crc kubenswrapper[4946]: I1204 16:04:59.167264 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/object-expirer/0.log" Dec 04 16:04:59 crc kubenswrapper[4946]: I1204 16:04:59.220321 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/object-replicator/0.log" Dec 04 16:04:59 crc kubenswrapper[4946]: I1204 16:04:59.410376 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/object-updater/0.log" Dec 04 16:04:59 crc kubenswrapper[4946]: I1204 16:04:59.481303 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/rsync/0.log" Dec 04 16:04:59 crc kubenswrapper[4946]: I1204 16:04:59.501095 4946 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/object-server/0.log" Dec 04 16:04:59 crc kubenswrapper[4946]: I1204 16:04:59.535634 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/swift-recon-cron/0.log" Dec 04 16:04:59 crc kubenswrapper[4946]: I1204 16:04:59.799720 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj_c3a03510-ccc5-4bce-9a72-0e943fd6423d/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 16:04:59 crc kubenswrapper[4946]: I1204 16:04:59.943554 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_3ca94ef1-1df3-4925-9335-b30db3fbffb9/tempest-tests-tempest-tests-runner/0.log" Dec 04 16:05:00 crc kubenswrapper[4946]: I1204 16:05:00.176521 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_58d92c3f-b05e-47c1-89f7-55d7c3686966/test-operator-logs-container/0.log" Dec 04 16:05:00 crc kubenswrapper[4946]: I1204 16:05:00.351354 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-6l555_a3311d26-79ab-4472-944b-4d6ac8847a76/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 16:05:06 crc kubenswrapper[4946]: I1204 16:05:06.501633 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_74261534-d493-4bb6-ac4f-e7196daaa71f/memcached/0.log" Dec 04 16:05:07 crc kubenswrapper[4946]: I1204 16:05:07.453935 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:05:07 crc kubenswrapper[4946]: E1204 16:05:07.454697 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:05:20 crc kubenswrapper[4946]: I1204 16:05:20.452977 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:05:20 crc kubenswrapper[4946]: E1204 16:05:20.453958 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:05:35 crc kubenswrapper[4946]: I1204 16:05:35.453580 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:05:35 crc kubenswrapper[4946]: E1204 16:05:35.457126 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:05:35 crc kubenswrapper[4946]: I1204 16:05:35.711144 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-hdr95_6d911452-36e0-4227-9068-4ed0b86f025c/kube-rbac-proxy/0.log" Dec 04 16:05:35 crc kubenswrapper[4946]: I1204 16:05:35.921942 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-hdr95_6d911452-36e0-4227-9068-4ed0b86f025c/manager/0.log" Dec 04 16:05:36 crc kubenswrapper[4946]: I1204 16:05:36.010465 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-29dnk_76e27cbb-fdb9-447e-983f-48b7dbe8d46d/kube-rbac-proxy/0.log" Dec 04 16:05:36 crc kubenswrapper[4946]: I1204 16:05:36.170232 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-29dnk_76e27cbb-fdb9-447e-983f-48b7dbe8d46d/manager/0.log" Dec 04 16:05:36 crc kubenswrapper[4946]: I1204 16:05:36.307004 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s_f8b2c834-77be-4c4c-90f9-ab83696108a8/util/0.log" Dec 04 16:05:36 crc kubenswrapper[4946]: I1204 16:05:36.574746 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s_f8b2c834-77be-4c4c-90f9-ab83696108a8/pull/0.log" Dec 04 16:05:36 crc kubenswrapper[4946]: I1204 16:05:36.597506 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s_f8b2c834-77be-4c4c-90f9-ab83696108a8/pull/0.log" Dec 04 16:05:36 crc kubenswrapper[4946]: I1204 16:05:36.663440 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s_f8b2c834-77be-4c4c-90f9-ab83696108a8/util/0.log" Dec 04 16:05:36 crc kubenswrapper[4946]: I1204 16:05:36.882272 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s_f8b2c834-77be-4c4c-90f9-ab83696108a8/util/0.log" Dec 04 16:05:36 crc kubenswrapper[4946]: I1204 16:05:36.882656 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s_f8b2c834-77be-4c4c-90f9-ab83696108a8/extract/0.log" Dec 04 16:05:36 crc kubenswrapper[4946]: I1204 16:05:36.882970 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s_f8b2c834-77be-4c4c-90f9-ab83696108a8/pull/0.log" Dec 04 16:05:37 crc kubenswrapper[4946]: I1204 16:05:37.872525 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-zdqwh_d4f2bb8c-1eac-4b12-bd9a-9c8ebad7d96f/manager/0.log" Dec 04 16:05:37 crc kubenswrapper[4946]: I1204 16:05:37.890969 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-zdqwh_d4f2bb8c-1eac-4b12-bd9a-9c8ebad7d96f/kube-rbac-proxy/0.log" Dec 04 16:05:37 crc kubenswrapper[4946]: I1204 16:05:37.891510 4946 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-fsn5z_a69ef7eb-6ffc-47cb-b7ee-7c46734d0857/kube-rbac-proxy/0.log" Dec 04 16:05:38 crc kubenswrapper[4946]: I1204 16:05:38.115478 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-bd6fv_ae1dfef3-ccf2-4ac3-986e-77c23bddcdb5/kube-rbac-proxy/0.log" Dec 04 16:05:38 crc kubenswrapper[4946]: I1204 16:05:38.140165 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-bd6fv_ae1dfef3-ccf2-4ac3-986e-77c23bddcdb5/manager/0.log" Dec 04 16:05:38 crc kubenswrapper[4946]: I1204 16:05:38.189864 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-fsn5z_a69ef7eb-6ffc-47cb-b7ee-7c46734d0857/manager/0.log" Dec 04 16:05:38 crc kubenswrapper[4946]: I1204 16:05:38.456382 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-rrwq2_866cf896-d679-426b-80d9-de7a368958ed/manager/0.log" Dec 04 16:05:38 crc kubenswrapper[4946]: I1204 16:05:38.464327 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-rrwq2_866cf896-d679-426b-80d9-de7a368958ed/kube-rbac-proxy/0.log" Dec 04 16:05:38 crc kubenswrapper[4946]: I1204 16:05:38.543078 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-tpk4r_421ad636-5eeb-4596-84c0-a0ca3cfbdef2/kube-rbac-proxy/0.log" Dec 04 16:05:38 crc kubenswrapper[4946]: I1204 16:05:38.760443 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-bnspk_9584ac77-41db-4621-a720-88b7c107ffa2/kube-rbac-proxy/0.log" Dec 04 16:05:38 crc kubenswrapper[4946]: I1204 16:05:38.766827 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-bnspk_9584ac77-41db-4621-a720-88b7c107ffa2/manager/0.log" Dec 04 16:05:38 crc kubenswrapper[4946]: I1204 16:05:38.893320 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-tpk4r_421ad636-5eeb-4596-84c0-a0ca3cfbdef2/manager/0.log" Dec 04 16:05:39 crc kubenswrapper[4946]: I1204 16:05:39.066579 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-8nbch_965366ad-4bb5-424a-9cf0-d09c42dec244/kube-rbac-proxy/0.log" Dec 04 16:05:39 crc kubenswrapper[4946]: I1204 16:05:39.109459 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-8nbch_965366ad-4bb5-424a-9cf0-d09c42dec244/manager/0.log" Dec 04 16:05:39 crc kubenswrapper[4946]: I1204 16:05:39.150887 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-j8r75_52d7003e-8315-49b6-b086-f0655f555960/kube-rbac-proxy/0.log" Dec 04 16:05:39 crc kubenswrapper[4946]: I1204 16:05:39.204482 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-j8r75_52d7003e-8315-49b6-b086-f0655f555960/manager/0.log" Dec 04 16:05:39 crc kubenswrapper[4946]: I1204 16:05:39.345482 
4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-2wkbw_db2d87e7-4cf3-4d0d-b77e-2d02a073872c/kube-rbac-proxy/0.log" Dec 04 16:05:39 crc kubenswrapper[4946]: I1204 16:05:39.472010 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-2wkbw_db2d87e7-4cf3-4d0d-b77e-2d02a073872c/manager/0.log" Dec 04 16:05:39 crc kubenswrapper[4946]: I1204 16:05:39.476418 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-ptn6j_41b063f1-7646-49dc-85e4-9e7185220de1/kube-rbac-proxy/0.log" Dec 04 16:05:39 crc kubenswrapper[4946]: I1204 16:05:39.603360 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-ptn6j_41b063f1-7646-49dc-85e4-9e7185220de1/manager/0.log" Dec 04 16:05:39 crc kubenswrapper[4946]: I1204 16:05:39.649093 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-vzpjw_10b2d29b-4444-4dfe-ad8f-ad913798df88/kube-rbac-proxy/0.log" Dec 04 16:05:39 crc kubenswrapper[4946]: I1204 16:05:39.826920 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-vzpjw_10b2d29b-4444-4dfe-ad8f-ad913798df88/manager/0.log" Dec 04 16:05:39 crc kubenswrapper[4946]: I1204 16:05:39.927901 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-77ss8_262aaccf-cdc8-44b6-8fc6-8702491cfad8/kube-rbac-proxy/0.log" Dec 04 16:05:40 crc kubenswrapper[4946]: I1204 16:05:40.022373 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-77ss8_262aaccf-cdc8-44b6-8fc6-8702491cfad8/manager/0.log" Dec 04 16:05:40 crc kubenswrapper[4946]: I1204 16:05:40.135034 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4frldw_e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6/manager/0.log" Dec 04 16:05:40 crc kubenswrapper[4946]: I1204 16:05:40.184037 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4frldw_e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6/kube-rbac-proxy/0.log" Dec 04 16:05:40 crc kubenswrapper[4946]: I1204 16:05:40.551615 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-ttbdt_06d05850-f87f-4944-be9f-c3f86f6bbc3e/registry-server/0.log" Dec 04 16:05:40 crc kubenswrapper[4946]: I1204 16:05:40.557073 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-769dd9b968-btnbt_aa1fefcd-b28b-4ecf-9b92-e1fabe27cd26/operator/0.log" Dec 04 16:05:40 crc kubenswrapper[4946]: I1204 16:05:40.799071 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-tp7zf_4439c79c-3951-4b61-98ad-86f417432fde/kube-rbac-proxy/0.log" Dec 04 16:05:40 crc kubenswrapper[4946]: I1204 16:05:40.986026 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-rdrpr_110a7ea7-4b02-4f5d-be16-87c4f0090eec/kube-rbac-proxy/0.log" Dec 04 16:05:40 crc 
kubenswrapper[4946]: I1204 16:05:40.989894 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-tp7zf_4439c79c-3951-4b61-98ad-86f417432fde/manager/0.log" Dec 04 16:05:41 crc kubenswrapper[4946]: I1204 16:05:41.159313 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-rdrpr_110a7ea7-4b02-4f5d-be16-87c4f0090eec/manager/0.log" Dec 04 16:05:41 crc kubenswrapper[4946]: I1204 16:05:41.355061 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-ffkxj_57d9b742-9429-43c6-8798-6813c321866f/operator/0.log" Dec 04 16:05:41 crc kubenswrapper[4946]: I1204 16:05:41.385650 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-25vwl_b7a5eb4e-a8b8-43e5-95cf-51f40d454d79/kube-rbac-proxy/0.log" Dec 04 16:05:41 crc kubenswrapper[4946]: I1204 16:05:41.551636 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7b58c9d549-7lmqq_6a246ded-a3c1-42c5-a6a7-648dec93f77f/manager/0.log" Dec 04 16:05:41 crc kubenswrapper[4946]: I1204 16:05:41.681945 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-25vwl_b7a5eb4e-a8b8-43e5-95cf-51f40d454d79/manager/0.log" Dec 04 16:05:41 crc kubenswrapper[4946]: I1204 16:05:41.684851 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5d9cf8555c-csjn7_c9933077-41f3-425f-b478-c53691b7d817/kube-rbac-proxy/0.log" Dec 04 16:05:41 crc kubenswrapper[4946]: I1204 16:05:41.995616 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-bqtnh_c92477ee-92e6-4dca-af5d-9b0f44bcaf60/manager/0.log" Dec 04 16:05:42 crc kubenswrapper[4946]: I1204 16:05:42.014885 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-bqtnh_c92477ee-92e6-4dca-af5d-9b0f44bcaf60/kube-rbac-proxy/0.log" Dec 04 16:05:42 crc kubenswrapper[4946]: I1204 16:05:42.111055 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5d9cf8555c-csjn7_c9933077-41f3-425f-b478-c53691b7d817/manager/0.log" Dec 04 16:05:42 crc kubenswrapper[4946]: I1204 16:05:42.293176 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-f6jlm_fa8a1267-46f8-4554-8a91-7389be265abd/kube-rbac-proxy/0.log" Dec 04 16:05:42 crc kubenswrapper[4946]: I1204 16:05:42.293557 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-f6jlm_fa8a1267-46f8-4554-8a91-7389be265abd/manager/0.log" Dec 04 16:05:48 crc kubenswrapper[4946]: I1204 16:05:48.453965 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:05:48 crc kubenswrapper[4946]: E1204 16:05:48.455103 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:06:03 crc kubenswrapper[4946]: I1204 16:06:03.453791 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:06:03 crc kubenswrapper[4946]: E1204 16:06:03.455557 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:06:10 crc kubenswrapper[4946]: I1204 16:06:10.340158 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-rvvcq_be182b12-eeb7-4695-b7e4-247044da76cf/control-plane-machine-set-operator/0.log" Dec 04 16:06:10 crc kubenswrapper[4946]: I1204 16:06:10.633796 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-7bzmc_65dc1ade-ddd4-4a22-99bd-780112f318f9/kube-rbac-proxy/0.log" Dec 04 16:06:10 crc kubenswrapper[4946]: I1204 16:06:10.725934 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-7bzmc_65dc1ade-ddd4-4a22-99bd-780112f318f9/machine-api-operator/0.log" Dec 04 16:06:18 crc kubenswrapper[4946]: I1204 16:06:18.453747 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:06:18 crc kubenswrapper[4946]: E1204 16:06:18.454781 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:06:29 crc kubenswrapper[4946]: I1204 16:06:29.715104 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-cr2wp_41b2bdd0-54fd-436e-a498-056e3fdd6934/cert-manager-controller/0.log" Dec 04 16:06:29 crc kubenswrapper[4946]: I1204 16:06:29.984033 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-kd8gk_bb8b188e-8662-4027-9493-886326967ed1/cert-manager-cainjector/0.log" Dec 04 16:06:30 crc kubenswrapper[4946]: I1204 16:06:30.053929 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-dd4pw_3611b347-1802-4635-8abd-47d9a6f4ad29/cert-manager-webhook/0.log" Dec 04 16:06:33 crc kubenswrapper[4946]: I1204 16:06:33.460413 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:06:33 crc kubenswrapper[4946]: E1204 16:06:33.461591 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:06:46 crc kubenswrapper[4946]: I1204 16:06:46.452863 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:06:46 crc kubenswrapper[4946]: E1204 16:06:46.453644 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:06:47 crc kubenswrapper[4946]: I1204 16:06:47.149657 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-ms9gw_cc926dea-6324-4350-bf4c-6f4142b2547b/nmstate-console-plugin/0.log" Dec 04 16:06:47 crc kubenswrapper[4946]: I1204 16:06:47.370278 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-b6z2h_5ae26fa8-9751-40d0-b327-45011a9ec579/nmstate-handler/0.log" Dec 04 16:06:47 crc kubenswrapper[4946]: I1204 16:06:47.528820 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-bxmt7_70f33645-a744-4196-a5d0-e577c90023d5/kube-rbac-proxy/0.log" Dec 04 16:06:47 crc kubenswrapper[4946]: I1204 16:06:47.593278 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-bxmt7_70f33645-a744-4196-a5d0-e577c90023d5/nmstate-metrics/0.log" Dec 04 16:06:47 crc kubenswrapper[4946]: I1204 16:06:47.688543 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-x8ccj_05dcb49b-4fdf-4fdb-b619-fc7649bb203d/nmstate-operator/0.log" Dec 04 16:06:47 crc kubenswrapper[4946]: I1204 16:06:47.775067 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-cb89w_4f22e89a-c84f-4f88-8718-2d3c7238324a/nmstate-webhook/0.log" Dec 04 16:07:00 crc kubenswrapper[4946]: I1204 16:07:00.453767 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:07:00 crc kubenswrapper[4946]: E1204 16:07:00.455017 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:07:03 crc kubenswrapper[4946]: I1204 16:07:03.535444 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-5f5b48f4dc-5fjct_14739c62-fc32-41a5-be6d-3f6673c6a231/kube-rbac-proxy/0.log" Dec 04 16:07:03 crc kubenswrapper[4946]: I1204 16:07:03.559837 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-5f5b48f4dc-5fjct_14739c62-fc32-41a5-be6d-3f6673c6a231/manager/0.log" Dec 04 16:07:12 crc 
kubenswrapper[4946]: I1204 16:07:12.456581 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:07:12 crc kubenswrapper[4946]: E1204 16:07:12.459181 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:07:20 crc kubenswrapper[4946]: I1204 16:07:20.086042 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-2h9x2_12008ba7-79ab-4c29-beb5-c3d5bffa7bd2/controller/0.log" Dec 04 16:07:20 crc kubenswrapper[4946]: I1204 16:07:20.141161 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-2h9x2_12008ba7-79ab-4c29-beb5-c3d5bffa7bd2/kube-rbac-proxy/0.log" Dec 04 16:07:20 crc kubenswrapper[4946]: I1204 16:07:20.306222 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-frr-files/0.log" Dec 04 16:07:20 crc kubenswrapper[4946]: I1204 16:07:20.486222 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-reloader/0.log" Dec 04 16:07:20 crc kubenswrapper[4946]: I1204 16:07:20.508027 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-metrics/0.log" Dec 04 16:07:20 crc kubenswrapper[4946]: I1204 16:07:20.543026 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-reloader/0.log" Dec 04 16:07:20 crc kubenswrapper[4946]: I1204 16:07:20.543613 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-frr-files/0.log" Dec 04 16:07:20 crc kubenswrapper[4946]: I1204 16:07:20.691700 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-frr-files/0.log" Dec 04 16:07:20 crc kubenswrapper[4946]: I1204 16:07:20.751867 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-reloader/0.log" Dec 04 16:07:20 crc kubenswrapper[4946]: I1204 16:07:20.800297 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-metrics/0.log" Dec 04 16:07:20 crc kubenswrapper[4946]: I1204 16:07:20.806523 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-metrics/0.log" Dec 04 16:07:21 crc kubenswrapper[4946]: I1204 16:07:21.045155 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-reloader/0.log" Dec 04 16:07:21 crc kubenswrapper[4946]: I1204 16:07:21.132715 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/controller/0.log" Dec 04 16:07:21 crc kubenswrapper[4946]: I1204 16:07:21.142654 4946 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-frr-files/0.log" Dec 04 16:07:21 crc kubenswrapper[4946]: I1204 16:07:21.152875 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-metrics/0.log" Dec 04 16:07:21 crc kubenswrapper[4946]: I1204 16:07:21.438724 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/kube-rbac-proxy/0.log" Dec 04 16:07:21 crc kubenswrapper[4946]: I1204 16:07:21.446314 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/kube-rbac-proxy-frr/0.log" Dec 04 16:07:21 crc kubenswrapper[4946]: I1204 16:07:21.449169 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/frr-metrics/0.log" Dec 04 16:07:21 crc kubenswrapper[4946]: I1204 16:07:21.655356 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-665hd_3fb0ad82-3e42-4980-ac9c-3fba3fac16fa/frr-k8s-webhook-server/0.log" Dec 04 16:07:21 crc kubenswrapper[4946]: I1204 16:07:21.679806 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/reloader/0.log" Dec 04 16:07:21 crc kubenswrapper[4946]: I1204 16:07:21.983368 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-9c8665f76-qlhr2_14e7c7f4-2a8f-42fc-85aa-11f9a57be226/manager/0.log" Dec 04 16:07:22 crc kubenswrapper[4946]: I1204 16:07:22.281549 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-76986644d9-lbmnp_c07a0c11-227a-4c24-8daa-695fa165bb03/webhook-server/0.log" Dec 04 16:07:22 crc kubenswrapper[4946]: I1204 16:07:22.441018 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-nsx9h_2c933247-f732-4808-a196-15d9ad5f03e7/kube-rbac-proxy/0.log" Dec 04 16:07:23 crc kubenswrapper[4946]: I1204 16:07:23.025723 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/frr/0.log" Dec 04 16:07:23 crc kubenswrapper[4946]: I1204 16:07:23.192811 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-nsx9h_2c933247-f732-4808-a196-15d9ad5f03e7/speaker/0.log" Dec 04 16:07:25 crc kubenswrapper[4946]: I1204 16:07:25.453100 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:07:25 crc kubenswrapper[4946]: E1204 16:07:25.453928 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:07:40 crc kubenswrapper[4946]: I1204 16:07:40.454732 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:07:40 crc kubenswrapper[4946]: E1204 16:07:40.456002 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:07:44 crc kubenswrapper[4946]: I1204 16:07:44.693799 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh_68203e19-3c15-4d99-a709-a7338b2f0dbc/util/0.log" Dec 04 16:07:44 crc kubenswrapper[4946]: I1204 16:07:44.895788 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh_68203e19-3c15-4d99-a709-a7338b2f0dbc/util/0.log" Dec 04 16:07:44 crc kubenswrapper[4946]: I1204 16:07:44.940197 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh_68203e19-3c15-4d99-a709-a7338b2f0dbc/pull/0.log" Dec 04 16:07:44 crc kubenswrapper[4946]: I1204 16:07:44.962303 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh_68203e19-3c15-4d99-a709-a7338b2f0dbc/pull/0.log" Dec 04 16:07:45 crc kubenswrapper[4946]: I1204 16:07:45.207238 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh_68203e19-3c15-4d99-a709-a7338b2f0dbc/extract/0.log" Dec 04 16:07:45 crc kubenswrapper[4946]: I1204 16:07:45.214871 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh_68203e19-3c15-4d99-a709-a7338b2f0dbc/util/0.log" Dec 04 16:07:45 crc kubenswrapper[4946]: I1204 16:07:45.236540 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh_68203e19-3c15-4d99-a709-a7338b2f0dbc/pull/0.log" Dec 04 16:07:45 crc kubenswrapper[4946]: I1204 16:07:45.473635 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg_e4caec8c-f2e0-48dd-8138-6bccb6fafb86/util/0.log" Dec 04 16:07:45 crc kubenswrapper[4946]: I1204 16:07:45.709634 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg_e4caec8c-f2e0-48dd-8138-6bccb6fafb86/util/0.log" Dec 04 16:07:45 crc kubenswrapper[4946]: I1204 16:07:45.710444 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg_e4caec8c-f2e0-48dd-8138-6bccb6fafb86/pull/0.log" Dec 04 16:07:45 crc kubenswrapper[4946]: I1204 16:07:45.727002 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg_e4caec8c-f2e0-48dd-8138-6bccb6fafb86/pull/0.log" Dec 04 16:07:45 crc kubenswrapper[4946]: I1204 16:07:45.963180 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg_e4caec8c-f2e0-48dd-8138-6bccb6fafb86/extract/0.log" Dec 04 16:07:45 crc kubenswrapper[4946]: I1204 16:07:45.998790 4946 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg_e4caec8c-f2e0-48dd-8138-6bccb6fafb86/pull/0.log" Dec 04 16:07:46 crc kubenswrapper[4946]: I1204 16:07:46.020029 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg_e4caec8c-f2e0-48dd-8138-6bccb6fafb86/util/0.log" Dec 04 16:07:46 crc kubenswrapper[4946]: I1204 16:07:46.319786 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk_768219c3-3efe-4a2f-9ac3-55cd3247166e/util/0.log" Dec 04 16:07:46 crc kubenswrapper[4946]: I1204 16:07:46.533982 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk_768219c3-3efe-4a2f-9ac3-55cd3247166e/util/0.log" Dec 04 16:07:46 crc kubenswrapper[4946]: I1204 16:07:46.614165 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk_768219c3-3efe-4a2f-9ac3-55cd3247166e/pull/0.log" Dec 04 16:07:46 crc kubenswrapper[4946]: I1204 16:07:46.614234 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk_768219c3-3efe-4a2f-9ac3-55cd3247166e/pull/0.log" Dec 04 16:07:46 crc kubenswrapper[4946]: I1204 16:07:46.812360 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk_768219c3-3efe-4a2f-9ac3-55cd3247166e/util/0.log" Dec 04 16:07:46 crc kubenswrapper[4946]: I1204 16:07:46.822735 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk_768219c3-3efe-4a2f-9ac3-55cd3247166e/pull/0.log" Dec 04 16:07:46 crc kubenswrapper[4946]: I1204 16:07:46.880193 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk_768219c3-3efe-4a2f-9ac3-55cd3247166e/extract/0.log" Dec 04 16:07:47 crc kubenswrapper[4946]: I1204 16:07:47.163153 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv_7349bd68-959a-4268-a194-f55f10061076/util/0.log" Dec 04 16:07:47 crc kubenswrapper[4946]: I1204 16:07:47.425085 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv_7349bd68-959a-4268-a194-f55f10061076/pull/0.log" Dec 04 16:07:47 crc kubenswrapper[4946]: I1204 16:07:47.435357 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv_7349bd68-959a-4268-a194-f55f10061076/pull/0.log" Dec 04 16:07:47 crc kubenswrapper[4946]: I1204 16:07:47.476934 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv_7349bd68-959a-4268-a194-f55f10061076/util/0.log" Dec 04 16:07:47 crc kubenswrapper[4946]: I1204 16:07:47.708756 4946 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv_7349bd68-959a-4268-a194-f55f10061076/util/0.log" Dec 04 16:07:47 crc kubenswrapper[4946]: I1204 16:07:47.756248 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv_7349bd68-959a-4268-a194-f55f10061076/pull/0.log" Dec 04 16:07:47 crc kubenswrapper[4946]: I1204 16:07:47.904113 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv_7349bd68-959a-4268-a194-f55f10061076/extract/0.log" Dec 04 16:07:47 crc kubenswrapper[4946]: I1204 16:07:47.905557 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l6sf5_3fcf10a5-8a06-4542-9839-91e2881b5a5e/extract-utilities/0.log" Dec 04 16:07:48 crc kubenswrapper[4946]: I1204 16:07:48.191341 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l6sf5_3fcf10a5-8a06-4542-9839-91e2881b5a5e/extract-content/0.log" Dec 04 16:07:48 crc kubenswrapper[4946]: I1204 16:07:48.219481 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l6sf5_3fcf10a5-8a06-4542-9839-91e2881b5a5e/extract-utilities/0.log" Dec 04 16:07:48 crc kubenswrapper[4946]: I1204 16:07:48.251319 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l6sf5_3fcf10a5-8a06-4542-9839-91e2881b5a5e/extract-content/0.log" Dec 04 16:07:48 crc kubenswrapper[4946]: I1204 16:07:48.681086 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l6sf5_3fcf10a5-8a06-4542-9839-91e2881b5a5e/extract-utilities/0.log" Dec 04 16:07:48 crc kubenswrapper[4946]: I1204 16:07:48.718096 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l6sf5_3fcf10a5-8a06-4542-9839-91e2881b5a5e/extract-content/0.log" Dec 04 16:07:48 crc kubenswrapper[4946]: I1204 16:07:48.978010 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8b6qp_b9e9c2ed-d146-49d0-94b7-e244eff03321/extract-utilities/0.log" Dec 04 16:07:49 crc kubenswrapper[4946]: I1204 16:07:49.248447 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l6sf5_3fcf10a5-8a06-4542-9839-91e2881b5a5e/registry-server/0.log" Dec 04 16:07:49 crc kubenswrapper[4946]: I1204 16:07:49.283963 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8b6qp_b9e9c2ed-d146-49d0-94b7-e244eff03321/extract-content/0.log" Dec 04 16:07:49 crc kubenswrapper[4946]: I1204 16:07:49.337165 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8b6qp_b9e9c2ed-d146-49d0-94b7-e244eff03321/extract-utilities/0.log" Dec 04 16:07:49 crc kubenswrapper[4946]: I1204 16:07:49.375173 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8b6qp_b9e9c2ed-d146-49d0-94b7-e244eff03321/extract-content/0.log" Dec 04 16:07:49 crc kubenswrapper[4946]: I1204 16:07:49.572845 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8b6qp_b9e9c2ed-d146-49d0-94b7-e244eff03321/extract-utilities/0.log" Dec 04 16:07:49 crc kubenswrapper[4946]: 
I1204 16:07:49.601397 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8b6qp_b9e9c2ed-d146-49d0-94b7-e244eff03321/extract-content/0.log" Dec 04 16:07:49 crc kubenswrapper[4946]: I1204 16:07:49.670828 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-2gvfq_0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8/marketplace-operator/0.log" Dec 04 16:07:49 crc kubenswrapper[4946]: I1204 16:07:49.862274 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mgklh_64dc1b8a-cfaa-435d-a093-fff34239250b/extract-utilities/0.log" Dec 04 16:07:50 crc kubenswrapper[4946]: I1204 16:07:50.126174 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8b6qp_b9e9c2ed-d146-49d0-94b7-e244eff03321/registry-server/0.log" Dec 04 16:07:50 crc kubenswrapper[4946]: I1204 16:07:50.214726 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mgklh_64dc1b8a-cfaa-435d-a093-fff34239250b/extract-utilities/0.log" Dec 04 16:07:50 crc kubenswrapper[4946]: I1204 16:07:50.221167 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mgklh_64dc1b8a-cfaa-435d-a093-fff34239250b/extract-content/0.log" Dec 04 16:07:50 crc kubenswrapper[4946]: I1204 16:07:50.241923 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mgklh_64dc1b8a-cfaa-435d-a093-fff34239250b/extract-content/0.log" Dec 04 16:07:50 crc kubenswrapper[4946]: I1204 16:07:50.561364 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mgklh_64dc1b8a-cfaa-435d-a093-fff34239250b/extract-content/0.log" Dec 04 16:07:50 crc kubenswrapper[4946]: I1204 16:07:50.602792 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mgklh_64dc1b8a-cfaa-435d-a093-fff34239250b/extract-utilities/0.log" Dec 04 16:07:50 crc kubenswrapper[4946]: I1204 16:07:50.630327 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bfp64_4e2bfdb6-af28-4c0d-8f0d-a99539c5b225/extract-utilities/0.log" Dec 04 16:07:50 crc kubenswrapper[4946]: I1204 16:07:50.739374 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mgklh_64dc1b8a-cfaa-435d-a093-fff34239250b/registry-server/0.log" Dec 04 16:07:50 crc kubenswrapper[4946]: I1204 16:07:50.890875 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bfp64_4e2bfdb6-af28-4c0d-8f0d-a99539c5b225/extract-utilities/0.log" Dec 04 16:07:50 crc kubenswrapper[4946]: I1204 16:07:50.893912 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bfp64_4e2bfdb6-af28-4c0d-8f0d-a99539c5b225/extract-content/0.log" Dec 04 16:07:50 crc kubenswrapper[4946]: I1204 16:07:50.924465 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bfp64_4e2bfdb6-af28-4c0d-8f0d-a99539c5b225/extract-content/0.log" Dec 04 16:07:51 crc kubenswrapper[4946]: I1204 16:07:51.141976 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bfp64_4e2bfdb6-af28-4c0d-8f0d-a99539c5b225/extract-utilities/0.log" Dec 04 16:07:51 crc kubenswrapper[4946]: I1204 16:07:51.173929 
4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bfp64_4e2bfdb6-af28-4c0d-8f0d-a99539c5b225/extract-content/0.log" Dec 04 16:07:51 crc kubenswrapper[4946]: I1204 16:07:51.325513 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bfp64_4e2bfdb6-af28-4c0d-8f0d-a99539c5b225/registry-server/0.log" Dec 04 16:07:54 crc kubenswrapper[4946]: I1204 16:07:54.453873 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:07:54 crc kubenswrapper[4946]: E1204 16:07:54.454744 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:08:07 crc kubenswrapper[4946]: I1204 16:08:07.453298 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:08:07 crc kubenswrapper[4946]: E1204 16:08:07.454310 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:08:09 crc kubenswrapper[4946]: I1204 16:08:09.348216 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-tjsxl_154e09f2-667a-45bf-abdb-fc3e1f0f0ba6/prometheus-operator/0.log" Dec 04 16:08:09 crc kubenswrapper[4946]: I1204 16:08:09.603046 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11/prometheus-operator-admission-webhook/0.log" Dec 04 16:08:09 crc kubenswrapper[4946]: I1204 16:08:09.635608 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_695e348c-7a92-4a69-b104-1f37361d5c49/prometheus-operator-admission-webhook/0.log" Dec 04 16:08:09 crc kubenswrapper[4946]: I1204 16:08:09.903805 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-rf6pw_c9624505-3974-47fb-93d7-1a2ff73b29c7/perses-operator/0.log" Dec 04 16:08:09 crc kubenswrapper[4946]: I1204 16:08:09.927277 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-bmpm5_adb38877-f50c-48aa-a3ca-951150033479/operator/0.log" Dec 04 16:08:22 crc kubenswrapper[4946]: I1204 16:08:22.453098 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:08:22 crc kubenswrapper[4946]: E1204 16:08:22.454255 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:08:28 crc kubenswrapper[4946]: I1204 16:08:28.648930 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-5f5b48f4dc-5fjct_14739c62-fc32-41a5-be6d-3f6673c6a231/kube-rbac-proxy/0.log" Dec 04 16:08:28 crc kubenswrapper[4946]: I1204 16:08:28.673733 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-5f5b48f4dc-5fjct_14739c62-fc32-41a5-be6d-3f6673c6a231/manager/0.log" Dec 04 16:08:35 crc kubenswrapper[4946]: I1204 16:08:35.453518 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:08:35 crc kubenswrapper[4946]: E1204 16:08:35.454520 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:08:38 crc kubenswrapper[4946]: I1204 16:08:38.465083 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-z9x4q"] Dec 04 16:08:38 crc kubenswrapper[4946]: E1204 16:08:38.466352 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="798180d6-b704-43bf-b80e-bfee9add7a03" containerName="container-00" Dec 04 16:08:38 crc kubenswrapper[4946]: I1204 16:08:38.466368 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="798180d6-b704-43bf-b80e-bfee9add7a03" containerName="container-00" Dec 04 16:08:38 crc kubenswrapper[4946]: I1204 16:08:38.466620 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="798180d6-b704-43bf-b80e-bfee9add7a03" containerName="container-00" Dec 04 16:08:38 crc kubenswrapper[4946]: I1204 16:08:38.469998 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z9x4q" Dec 04 16:08:38 crc kubenswrapper[4946]: I1204 16:08:38.484493 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-z9x4q"] Dec 04 16:08:38 crc kubenswrapper[4946]: I1204 16:08:38.537663 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jm99q\" (UniqueName: \"kubernetes.io/projected/7bbd6173-4af0-46f7-a811-623876d6ed7f-kube-api-access-jm99q\") pod \"redhat-marketplace-z9x4q\" (UID: \"7bbd6173-4af0-46f7-a811-623876d6ed7f\") " pod="openshift-marketplace/redhat-marketplace-z9x4q" Dec 04 16:08:38 crc kubenswrapper[4946]: I1204 16:08:38.538122 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bbd6173-4af0-46f7-a811-623876d6ed7f-catalog-content\") pod \"redhat-marketplace-z9x4q\" (UID: \"7bbd6173-4af0-46f7-a811-623876d6ed7f\") " pod="openshift-marketplace/redhat-marketplace-z9x4q" Dec 04 16:08:38 crc kubenswrapper[4946]: I1204 16:08:38.538277 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bbd6173-4af0-46f7-a811-623876d6ed7f-utilities\") pod \"redhat-marketplace-z9x4q\" (UID: \"7bbd6173-4af0-46f7-a811-623876d6ed7f\") " pod="openshift-marketplace/redhat-marketplace-z9x4q" Dec 04 16:08:38 crc kubenswrapper[4946]: I1204 16:08:38.640891 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bbd6173-4af0-46f7-a811-623876d6ed7f-catalog-content\") pod \"redhat-marketplace-z9x4q\" (UID: \"7bbd6173-4af0-46f7-a811-623876d6ed7f\") " pod="openshift-marketplace/redhat-marketplace-z9x4q" Dec 04 16:08:38 crc kubenswrapper[4946]: I1204 16:08:38.640971 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bbd6173-4af0-46f7-a811-623876d6ed7f-utilities\") pod \"redhat-marketplace-z9x4q\" (UID: \"7bbd6173-4af0-46f7-a811-623876d6ed7f\") " pod="openshift-marketplace/redhat-marketplace-z9x4q" Dec 04 16:08:38 crc kubenswrapper[4946]: I1204 16:08:38.641129 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jm99q\" (UniqueName: \"kubernetes.io/projected/7bbd6173-4af0-46f7-a811-623876d6ed7f-kube-api-access-jm99q\") pod \"redhat-marketplace-z9x4q\" (UID: \"7bbd6173-4af0-46f7-a811-623876d6ed7f\") " pod="openshift-marketplace/redhat-marketplace-z9x4q" Dec 04 16:08:38 crc kubenswrapper[4946]: I1204 16:08:38.641521 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bbd6173-4af0-46f7-a811-623876d6ed7f-catalog-content\") pod \"redhat-marketplace-z9x4q\" (UID: \"7bbd6173-4af0-46f7-a811-623876d6ed7f\") " pod="openshift-marketplace/redhat-marketplace-z9x4q" Dec 04 16:08:38 crc kubenswrapper[4946]: I1204 16:08:38.641616 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bbd6173-4af0-46f7-a811-623876d6ed7f-utilities\") pod \"redhat-marketplace-z9x4q\" (UID: \"7bbd6173-4af0-46f7-a811-623876d6ed7f\") " pod="openshift-marketplace/redhat-marketplace-z9x4q" Dec 04 16:08:38 crc kubenswrapper[4946]: I1204 16:08:38.666282 4946 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-jm99q\" (UniqueName: \"kubernetes.io/projected/7bbd6173-4af0-46f7-a811-623876d6ed7f-kube-api-access-jm99q\") pod \"redhat-marketplace-z9x4q\" (UID: \"7bbd6173-4af0-46f7-a811-623876d6ed7f\") " pod="openshift-marketplace/redhat-marketplace-z9x4q" Dec 04 16:08:38 crc kubenswrapper[4946]: I1204 16:08:38.838283 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z9x4q" Dec 04 16:08:39 crc kubenswrapper[4946]: W1204 16:08:39.484406 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7bbd6173_4af0_46f7_a811_623876d6ed7f.slice/crio-30fa650b50e427e74dbc8f0cb2f2bb98efef1ead5f3005a6c2a9c8a628d1b321 WatchSource:0}: Error finding container 30fa650b50e427e74dbc8f0cb2f2bb98efef1ead5f3005a6c2a9c8a628d1b321: Status 404 returned error can't find the container with id 30fa650b50e427e74dbc8f0cb2f2bb98efef1ead5f3005a6c2a9c8a628d1b321 Dec 04 16:08:39 crc kubenswrapper[4946]: I1204 16:08:39.512511 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-z9x4q"] Dec 04 16:08:40 crc kubenswrapper[4946]: I1204 16:08:40.268504 4946 generic.go:334] "Generic (PLEG): container finished" podID="7bbd6173-4af0-46f7-a811-623876d6ed7f" containerID="fa600529e645398f9e364d186ae3bb701e9c54b67cd411c4c41f5afbcc8b8f56" exitCode=0 Dec 04 16:08:40 crc kubenswrapper[4946]: I1204 16:08:40.268592 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z9x4q" event={"ID":"7bbd6173-4af0-46f7-a811-623876d6ed7f","Type":"ContainerDied","Data":"fa600529e645398f9e364d186ae3bb701e9c54b67cd411c4c41f5afbcc8b8f56"} Dec 04 16:08:40 crc kubenswrapper[4946]: I1204 16:08:40.268940 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z9x4q" event={"ID":"7bbd6173-4af0-46f7-a811-623876d6ed7f","Type":"ContainerStarted","Data":"30fa650b50e427e74dbc8f0cb2f2bb98efef1ead5f3005a6c2a9c8a628d1b321"} Dec 04 16:08:40 crc kubenswrapper[4946]: I1204 16:08:40.272231 4946 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 16:08:41 crc kubenswrapper[4946]: I1204 16:08:41.281742 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z9x4q" event={"ID":"7bbd6173-4af0-46f7-a811-623876d6ed7f","Type":"ContainerStarted","Data":"3663663345babb65db33058c962ea0c687fd3f864a8bf46cdbc2bc4f3df462c5"} Dec 04 16:08:41 crc kubenswrapper[4946]: I1204 16:08:41.856452 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b5ns7"] Dec 04 16:08:41 crc kubenswrapper[4946]: I1204 16:08:41.859432 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b5ns7" Dec 04 16:08:41 crc kubenswrapper[4946]: I1204 16:08:41.881464 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b5ns7"] Dec 04 16:08:42 crc kubenswrapper[4946]: I1204 16:08:42.030884 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdwt2\" (UniqueName: \"kubernetes.io/projected/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02-kube-api-access-tdwt2\") pod \"redhat-operators-b5ns7\" (UID: \"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02\") " pod="openshift-marketplace/redhat-operators-b5ns7" Dec 04 16:08:42 crc kubenswrapper[4946]: I1204 16:08:42.031106 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02-catalog-content\") pod \"redhat-operators-b5ns7\" (UID: \"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02\") " pod="openshift-marketplace/redhat-operators-b5ns7" Dec 04 16:08:42 crc kubenswrapper[4946]: I1204 16:08:42.031333 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02-utilities\") pod \"redhat-operators-b5ns7\" (UID: \"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02\") " pod="openshift-marketplace/redhat-operators-b5ns7" Dec 04 16:08:42 crc kubenswrapper[4946]: I1204 16:08:42.135659 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02-catalog-content\") pod \"redhat-operators-b5ns7\" (UID: \"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02\") " pod="openshift-marketplace/redhat-operators-b5ns7" Dec 04 16:08:42 crc kubenswrapper[4946]: I1204 16:08:42.135714 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02-utilities\") pod \"redhat-operators-b5ns7\" (UID: \"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02\") " pod="openshift-marketplace/redhat-operators-b5ns7" Dec 04 16:08:42 crc kubenswrapper[4946]: I1204 16:08:42.135868 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdwt2\" (UniqueName: \"kubernetes.io/projected/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02-kube-api-access-tdwt2\") pod \"redhat-operators-b5ns7\" (UID: \"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02\") " pod="openshift-marketplace/redhat-operators-b5ns7" Dec 04 16:08:42 crc kubenswrapper[4946]: I1204 16:08:42.136228 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02-catalog-content\") pod \"redhat-operators-b5ns7\" (UID: \"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02\") " pod="openshift-marketplace/redhat-operators-b5ns7" Dec 04 16:08:42 crc kubenswrapper[4946]: I1204 16:08:42.136511 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02-utilities\") pod \"redhat-operators-b5ns7\" (UID: \"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02\") " pod="openshift-marketplace/redhat-operators-b5ns7" Dec 04 16:08:42 crc kubenswrapper[4946]: I1204 16:08:42.169867 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-tdwt2\" (UniqueName: \"kubernetes.io/projected/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02-kube-api-access-tdwt2\") pod \"redhat-operators-b5ns7\" (UID: \"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02\") " pod="openshift-marketplace/redhat-operators-b5ns7" Dec 04 16:08:42 crc kubenswrapper[4946]: I1204 16:08:42.201256 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b5ns7" Dec 04 16:08:42 crc kubenswrapper[4946]: I1204 16:08:42.299379 4946 generic.go:334] "Generic (PLEG): container finished" podID="7bbd6173-4af0-46f7-a811-623876d6ed7f" containerID="3663663345babb65db33058c962ea0c687fd3f864a8bf46cdbc2bc4f3df462c5" exitCode=0 Dec 04 16:08:42 crc kubenswrapper[4946]: I1204 16:08:42.299452 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z9x4q" event={"ID":"7bbd6173-4af0-46f7-a811-623876d6ed7f","Type":"ContainerDied","Data":"3663663345babb65db33058c962ea0c687fd3f864a8bf46cdbc2bc4f3df462c5"} Dec 04 16:08:42 crc kubenswrapper[4946]: I1204 16:08:42.779927 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b5ns7"] Dec 04 16:08:43 crc kubenswrapper[4946]: I1204 16:08:43.314871 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z9x4q" event={"ID":"7bbd6173-4af0-46f7-a811-623876d6ed7f","Type":"ContainerStarted","Data":"97e6f86d076e5e9a19805c027b6e5a6b9b61f1df2e905e80a6952dfad66ad18d"} Dec 04 16:08:43 crc kubenswrapper[4946]: I1204 16:08:43.320857 4946 generic.go:334] "Generic (PLEG): container finished" podID="0e3f6e8a-1410-458a-82c2-8cb4e17bdb02" containerID="a8e03593b376a31463108234f3c0d97538d889ee026b64db68e643b89e8eb963" exitCode=0 Dec 04 16:08:43 crc kubenswrapper[4946]: I1204 16:08:43.320899 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b5ns7" event={"ID":"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02","Type":"ContainerDied","Data":"a8e03593b376a31463108234f3c0d97538d889ee026b64db68e643b89e8eb963"} Dec 04 16:08:43 crc kubenswrapper[4946]: I1204 16:08:43.320920 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b5ns7" event={"ID":"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02","Type":"ContainerStarted","Data":"30c1a6e637a82e9a7d5ed1a8d361c454b70dd42d3cb1ede0795149067a044685"} Dec 04 16:08:43 crc kubenswrapper[4946]: I1204 16:08:43.345489 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-z9x4q" podStartSLOduration=2.817398958 podStartE2EDuration="5.345465919s" podCreationTimestamp="2025-12-04 16:08:38 +0000 UTC" firstStartedPulling="2025-12-04 16:08:40.271980828 +0000 UTC m=+3971.158024469" lastFinishedPulling="2025-12-04 16:08:42.800047789 +0000 UTC m=+3973.686091430" observedRunningTime="2025-12-04 16:08:43.339035478 +0000 UTC m=+3974.225079119" watchObservedRunningTime="2025-12-04 16:08:43.345465919 +0000 UTC m=+3974.231509560" Dec 04 16:08:45 crc kubenswrapper[4946]: I1204 16:08:45.348599 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b5ns7" event={"ID":"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02","Type":"ContainerStarted","Data":"05e3165b9dc5ca9f23d4cdf1f5c8a1208ae5d31207a77f7626c1181133e9ec2b"} Dec 04 16:08:48 crc kubenswrapper[4946]: I1204 16:08:48.395044 4946 generic.go:334] "Generic (PLEG): container finished" podID="0e3f6e8a-1410-458a-82c2-8cb4e17bdb02" 
containerID="05e3165b9dc5ca9f23d4cdf1f5c8a1208ae5d31207a77f7626c1181133e9ec2b" exitCode=0 Dec 04 16:08:48 crc kubenswrapper[4946]: I1204 16:08:48.395161 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b5ns7" event={"ID":"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02","Type":"ContainerDied","Data":"05e3165b9dc5ca9f23d4cdf1f5c8a1208ae5d31207a77f7626c1181133e9ec2b"} Dec 04 16:08:48 crc kubenswrapper[4946]: I1204 16:08:48.453059 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:08:48 crc kubenswrapper[4946]: E1204 16:08:48.453357 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:08:48 crc kubenswrapper[4946]: I1204 16:08:48.838688 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-z9x4q" Dec 04 16:08:48 crc kubenswrapper[4946]: I1204 16:08:48.838801 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-z9x4q" Dec 04 16:08:48 crc kubenswrapper[4946]: I1204 16:08:48.916017 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-z9x4q" Dec 04 16:08:49 crc kubenswrapper[4946]: I1204 16:08:49.416967 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b5ns7" event={"ID":"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02","Type":"ContainerStarted","Data":"3f5d9384b02cc12d701e001d97748077402325cbbcc56078162dbe8f9759332b"} Dec 04 16:08:49 crc kubenswrapper[4946]: I1204 16:08:49.448428 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b5ns7" podStartSLOduration=2.959395784 podStartE2EDuration="8.448403221s" podCreationTimestamp="2025-12-04 16:08:41 +0000 UTC" firstStartedPulling="2025-12-04 16:08:43.323094974 +0000 UTC m=+3974.209138605" lastFinishedPulling="2025-12-04 16:08:48.812102401 +0000 UTC m=+3979.698146042" observedRunningTime="2025-12-04 16:08:49.435735203 +0000 UTC m=+3980.321778844" watchObservedRunningTime="2025-12-04 16:08:49.448403221 +0000 UTC m=+3980.334446862" Dec 04 16:08:49 crc kubenswrapper[4946]: I1204 16:08:49.494776 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-z9x4q" Dec 04 16:08:51 crc kubenswrapper[4946]: I1204 16:08:51.549743 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-z9x4q"] Dec 04 16:08:52 crc kubenswrapper[4946]: I1204 16:08:52.202010 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-b5ns7" Dec 04 16:08:52 crc kubenswrapper[4946]: I1204 16:08:52.202079 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b5ns7" Dec 04 16:08:52 crc kubenswrapper[4946]: I1204 16:08:52.481643 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-z9x4q" 
podUID="7bbd6173-4af0-46f7-a811-623876d6ed7f" containerName="registry-server" containerID="cri-o://97e6f86d076e5e9a19805c027b6e5a6b9b61f1df2e905e80a6952dfad66ad18d" gracePeriod=2 Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.268897 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-b5ns7" podUID="0e3f6e8a-1410-458a-82c2-8cb4e17bdb02" containerName="registry-server" probeResult="failure" output=< Dec 04 16:08:53 crc kubenswrapper[4946]: timeout: failed to connect service ":50051" within 1s Dec 04 16:08:53 crc kubenswrapper[4946]: > Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.468045 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z9x4q" Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.496475 4946 generic.go:334] "Generic (PLEG): container finished" podID="7bbd6173-4af0-46f7-a811-623876d6ed7f" containerID="97e6f86d076e5e9a19805c027b6e5a6b9b61f1df2e905e80a6952dfad66ad18d" exitCode=0 Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.496531 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z9x4q" event={"ID":"7bbd6173-4af0-46f7-a811-623876d6ed7f","Type":"ContainerDied","Data":"97e6f86d076e5e9a19805c027b6e5a6b9b61f1df2e905e80a6952dfad66ad18d"} Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.496558 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z9x4q" event={"ID":"7bbd6173-4af0-46f7-a811-623876d6ed7f","Type":"ContainerDied","Data":"30fa650b50e427e74dbc8f0cb2f2bb98efef1ead5f3005a6c2a9c8a628d1b321"} Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.496579 4946 scope.go:117] "RemoveContainer" containerID="97e6f86d076e5e9a19805c027b6e5a6b9b61f1df2e905e80a6952dfad66ad18d" Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.496586 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z9x4q" Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.537847 4946 scope.go:117] "RemoveContainer" containerID="3663663345babb65db33058c962ea0c687fd3f864a8bf46cdbc2bc4f3df462c5" Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.575419 4946 scope.go:117] "RemoveContainer" containerID="fa600529e645398f9e364d186ae3bb701e9c54b67cd411c4c41f5afbcc8b8f56" Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.599897 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bbd6173-4af0-46f7-a811-623876d6ed7f-catalog-content\") pod \"7bbd6173-4af0-46f7-a811-623876d6ed7f\" (UID: \"7bbd6173-4af0-46f7-a811-623876d6ed7f\") " Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.600036 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bbd6173-4af0-46f7-a811-623876d6ed7f-utilities\") pod \"7bbd6173-4af0-46f7-a811-623876d6ed7f\" (UID: \"7bbd6173-4af0-46f7-a811-623876d6ed7f\") " Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.600123 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jm99q\" (UniqueName: \"kubernetes.io/projected/7bbd6173-4af0-46f7-a811-623876d6ed7f-kube-api-access-jm99q\") pod \"7bbd6173-4af0-46f7-a811-623876d6ed7f\" (UID: \"7bbd6173-4af0-46f7-a811-623876d6ed7f\") " Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.602561 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7bbd6173-4af0-46f7-a811-623876d6ed7f-utilities" (OuterVolumeSpecName: "utilities") pod "7bbd6173-4af0-46f7-a811-623876d6ed7f" (UID: "7bbd6173-4af0-46f7-a811-623876d6ed7f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.609985 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bbd6173-4af0-46f7-a811-623876d6ed7f-kube-api-access-jm99q" (OuterVolumeSpecName: "kube-api-access-jm99q") pod "7bbd6173-4af0-46f7-a811-623876d6ed7f" (UID: "7bbd6173-4af0-46f7-a811-623876d6ed7f"). InnerVolumeSpecName "kube-api-access-jm99q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.620243 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7bbd6173-4af0-46f7-a811-623876d6ed7f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7bbd6173-4af0-46f7-a811-623876d6ed7f" (UID: "7bbd6173-4af0-46f7-a811-623876d6ed7f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.645586 4946 scope.go:117] "RemoveContainer" containerID="97e6f86d076e5e9a19805c027b6e5a6b9b61f1df2e905e80a6952dfad66ad18d" Dec 04 16:08:53 crc kubenswrapper[4946]: E1204 16:08:53.646508 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97e6f86d076e5e9a19805c027b6e5a6b9b61f1df2e905e80a6952dfad66ad18d\": container with ID starting with 97e6f86d076e5e9a19805c027b6e5a6b9b61f1df2e905e80a6952dfad66ad18d not found: ID does not exist" containerID="97e6f86d076e5e9a19805c027b6e5a6b9b61f1df2e905e80a6952dfad66ad18d" Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.646563 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97e6f86d076e5e9a19805c027b6e5a6b9b61f1df2e905e80a6952dfad66ad18d"} err="failed to get container status \"97e6f86d076e5e9a19805c027b6e5a6b9b61f1df2e905e80a6952dfad66ad18d\": rpc error: code = NotFound desc = could not find container \"97e6f86d076e5e9a19805c027b6e5a6b9b61f1df2e905e80a6952dfad66ad18d\": container with ID starting with 97e6f86d076e5e9a19805c027b6e5a6b9b61f1df2e905e80a6952dfad66ad18d not found: ID does not exist" Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.646595 4946 scope.go:117] "RemoveContainer" containerID="3663663345babb65db33058c962ea0c687fd3f864a8bf46cdbc2bc4f3df462c5" Dec 04 16:08:53 crc kubenswrapper[4946]: E1204 16:08:53.647308 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3663663345babb65db33058c962ea0c687fd3f864a8bf46cdbc2bc4f3df462c5\": container with ID starting with 3663663345babb65db33058c962ea0c687fd3f864a8bf46cdbc2bc4f3df462c5 not found: ID does not exist" containerID="3663663345babb65db33058c962ea0c687fd3f864a8bf46cdbc2bc4f3df462c5" Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.647367 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3663663345babb65db33058c962ea0c687fd3f864a8bf46cdbc2bc4f3df462c5"} err="failed to get container status \"3663663345babb65db33058c962ea0c687fd3f864a8bf46cdbc2bc4f3df462c5\": rpc error: code = NotFound desc = could not find container \"3663663345babb65db33058c962ea0c687fd3f864a8bf46cdbc2bc4f3df462c5\": container with ID starting with 3663663345babb65db33058c962ea0c687fd3f864a8bf46cdbc2bc4f3df462c5 not found: ID does not exist" Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.647402 4946 scope.go:117] "RemoveContainer" containerID="fa600529e645398f9e364d186ae3bb701e9c54b67cd411c4c41f5afbcc8b8f56" Dec 04 16:08:53 crc kubenswrapper[4946]: E1204 16:08:53.648218 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa600529e645398f9e364d186ae3bb701e9c54b67cd411c4c41f5afbcc8b8f56\": container with ID starting with fa600529e645398f9e364d186ae3bb701e9c54b67cd411c4c41f5afbcc8b8f56 not found: ID does not exist" containerID="fa600529e645398f9e364d186ae3bb701e9c54b67cd411c4c41f5afbcc8b8f56" Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.648278 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa600529e645398f9e364d186ae3bb701e9c54b67cd411c4c41f5afbcc8b8f56"} err="failed to get container status \"fa600529e645398f9e364d186ae3bb701e9c54b67cd411c4c41f5afbcc8b8f56\": rpc error: code = NotFound desc = could not 
find container \"fa600529e645398f9e364d186ae3bb701e9c54b67cd411c4c41f5afbcc8b8f56\": container with ID starting with fa600529e645398f9e364d186ae3bb701e9c54b67cd411c4c41f5afbcc8b8f56 not found: ID does not exist" Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.708981 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bbd6173-4af0-46f7-a811-623876d6ed7f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.709477 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bbd6173-4af0-46f7-a811-623876d6ed7f-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.709488 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jm99q\" (UniqueName: \"kubernetes.io/projected/7bbd6173-4af0-46f7-a811-623876d6ed7f-kube-api-access-jm99q\") on node \"crc\" DevicePath \"\"" Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.863621 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-z9x4q"] Dec 04 16:08:53 crc kubenswrapper[4946]: I1204 16:08:53.876004 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-z9x4q"] Dec 04 16:08:55 crc kubenswrapper[4946]: I1204 16:08:55.467284 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bbd6173-4af0-46f7-a811-623876d6ed7f" path="/var/lib/kubelet/pods/7bbd6173-4af0-46f7-a811-623876d6ed7f/volumes" Dec 04 16:09:01 crc kubenswrapper[4946]: I1204 16:09:01.462449 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:09:01 crc kubenswrapper[4946]: E1204 16:09:01.463613 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:09:02 crc kubenswrapper[4946]: I1204 16:09:02.249362 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b5ns7" Dec 04 16:09:02 crc kubenswrapper[4946]: I1204 16:09:02.295668 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b5ns7" Dec 04 16:09:02 crc kubenswrapper[4946]: I1204 16:09:02.514428 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b5ns7"] Dec 04 16:09:03 crc kubenswrapper[4946]: I1204 16:09:03.643079 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-b5ns7" podUID="0e3f6e8a-1410-458a-82c2-8cb4e17bdb02" containerName="registry-server" containerID="cri-o://3f5d9384b02cc12d701e001d97748077402325cbbcc56078162dbe8f9759332b" gracePeriod=2 Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.457308 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b5ns7" Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.619863 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02-catalog-content\") pod \"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02\" (UID: \"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02\") " Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.620844 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02-utilities\") pod \"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02\" (UID: \"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02\") " Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.621853 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02-utilities" (OuterVolumeSpecName: "utilities") pod "0e3f6e8a-1410-458a-82c2-8cb4e17bdb02" (UID: "0e3f6e8a-1410-458a-82c2-8cb4e17bdb02"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.621930 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tdwt2\" (UniqueName: \"kubernetes.io/projected/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02-kube-api-access-tdwt2\") pod \"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02\" (UID: \"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02\") " Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.623737 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.629952 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02-kube-api-access-tdwt2" (OuterVolumeSpecName: "kube-api-access-tdwt2") pod "0e3f6e8a-1410-458a-82c2-8cb4e17bdb02" (UID: "0e3f6e8a-1410-458a-82c2-8cb4e17bdb02"). InnerVolumeSpecName "kube-api-access-tdwt2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.660849 4946 generic.go:334] "Generic (PLEG): container finished" podID="0e3f6e8a-1410-458a-82c2-8cb4e17bdb02" containerID="3f5d9384b02cc12d701e001d97748077402325cbbcc56078162dbe8f9759332b" exitCode=0 Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.660905 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b5ns7" event={"ID":"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02","Type":"ContainerDied","Data":"3f5d9384b02cc12d701e001d97748077402325cbbcc56078162dbe8f9759332b"} Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.660948 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b5ns7" event={"ID":"0e3f6e8a-1410-458a-82c2-8cb4e17bdb02","Type":"ContainerDied","Data":"30c1a6e637a82e9a7d5ed1a8d361c454b70dd42d3cb1ede0795149067a044685"} Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.660969 4946 scope.go:117] "RemoveContainer" containerID="3f5d9384b02cc12d701e001d97748077402325cbbcc56078162dbe8f9759332b" Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.660969 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b5ns7" Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.727240 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tdwt2\" (UniqueName: \"kubernetes.io/projected/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02-kube-api-access-tdwt2\") on node \"crc\" DevicePath \"\"" Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.738341 4946 scope.go:117] "RemoveContainer" containerID="05e3165b9dc5ca9f23d4cdf1f5c8a1208ae5d31207a77f7626c1181133e9ec2b" Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.767941 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0e3f6e8a-1410-458a-82c2-8cb4e17bdb02" (UID: "0e3f6e8a-1410-458a-82c2-8cb4e17bdb02"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.777261 4946 scope.go:117] "RemoveContainer" containerID="a8e03593b376a31463108234f3c0d97538d889ee026b64db68e643b89e8eb963" Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.829443 4946 scope.go:117] "RemoveContainer" containerID="3f5d9384b02cc12d701e001d97748077402325cbbcc56078162dbe8f9759332b" Dec 04 16:09:04 crc kubenswrapper[4946]: E1204 16:09:04.830031 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f5d9384b02cc12d701e001d97748077402325cbbcc56078162dbe8f9759332b\": container with ID starting with 3f5d9384b02cc12d701e001d97748077402325cbbcc56078162dbe8f9759332b not found: ID does not exist" containerID="3f5d9384b02cc12d701e001d97748077402325cbbcc56078162dbe8f9759332b" Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.830079 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f5d9384b02cc12d701e001d97748077402325cbbcc56078162dbe8f9759332b"} err="failed to get container status \"3f5d9384b02cc12d701e001d97748077402325cbbcc56078162dbe8f9759332b\": rpc error: code = NotFound desc = could not find container \"3f5d9384b02cc12d701e001d97748077402325cbbcc56078162dbe8f9759332b\": container with ID starting with 3f5d9384b02cc12d701e001d97748077402325cbbcc56078162dbe8f9759332b not found: ID does not exist" Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.830105 4946 scope.go:117] "RemoveContainer" containerID="05e3165b9dc5ca9f23d4cdf1f5c8a1208ae5d31207a77f7626c1181133e9ec2b" Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.830143 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 16:09:04 crc kubenswrapper[4946]: E1204 16:09:04.832853 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05e3165b9dc5ca9f23d4cdf1f5c8a1208ae5d31207a77f7626c1181133e9ec2b\": container with ID starting with 05e3165b9dc5ca9f23d4cdf1f5c8a1208ae5d31207a77f7626c1181133e9ec2b not found: ID does not exist" containerID="05e3165b9dc5ca9f23d4cdf1f5c8a1208ae5d31207a77f7626c1181133e9ec2b" Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.832960 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05e3165b9dc5ca9f23d4cdf1f5c8a1208ae5d31207a77f7626c1181133e9ec2b"} err="failed 
to get container status \"05e3165b9dc5ca9f23d4cdf1f5c8a1208ae5d31207a77f7626c1181133e9ec2b\": rpc error: code = NotFound desc = could not find container \"05e3165b9dc5ca9f23d4cdf1f5c8a1208ae5d31207a77f7626c1181133e9ec2b\": container with ID starting with 05e3165b9dc5ca9f23d4cdf1f5c8a1208ae5d31207a77f7626c1181133e9ec2b not found: ID does not exist" Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.833040 4946 scope.go:117] "RemoveContainer" containerID="a8e03593b376a31463108234f3c0d97538d889ee026b64db68e643b89e8eb963" Dec 04 16:09:04 crc kubenswrapper[4946]: E1204 16:09:04.833716 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8e03593b376a31463108234f3c0d97538d889ee026b64db68e643b89e8eb963\": container with ID starting with a8e03593b376a31463108234f3c0d97538d889ee026b64db68e643b89e8eb963 not found: ID does not exist" containerID="a8e03593b376a31463108234f3c0d97538d889ee026b64db68e643b89e8eb963" Dec 04 16:09:04 crc kubenswrapper[4946]: I1204 16:09:04.833775 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8e03593b376a31463108234f3c0d97538d889ee026b64db68e643b89e8eb963"} err="failed to get container status \"a8e03593b376a31463108234f3c0d97538d889ee026b64db68e643b89e8eb963\": rpc error: code = NotFound desc = could not find container \"a8e03593b376a31463108234f3c0d97538d889ee026b64db68e643b89e8eb963\": container with ID starting with a8e03593b376a31463108234f3c0d97538d889ee026b64db68e643b89e8eb963 not found: ID does not exist" Dec 04 16:09:05 crc kubenswrapper[4946]: I1204 16:09:05.011945 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b5ns7"] Dec 04 16:09:05 crc kubenswrapper[4946]: I1204 16:09:05.034744 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-b5ns7"] Dec 04 16:09:05 crc kubenswrapper[4946]: I1204 16:09:05.466513 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e3f6e8a-1410-458a-82c2-8cb4e17bdb02" path="/var/lib/kubelet/pods/0e3f6e8a-1410-458a-82c2-8cb4e17bdb02/volumes" Dec 04 16:09:14 crc kubenswrapper[4946]: I1204 16:09:14.453424 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:09:14 crc kubenswrapper[4946]: E1204 16:09:14.454429 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:09:25 crc kubenswrapper[4946]: I1204 16:09:25.453722 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:09:25 crc kubenswrapper[4946]: I1204 16:09:25.987198 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"43c4931322976c77430de96205a2c957b71c7e81d18e6e2e9fc9b080b7e4614f"} Dec 04 16:09:52 crc kubenswrapper[4946]: I1204 16:09:52.876418 4946 scope.go:117] "RemoveContainer" containerID="039457f6257a78c742abaf5dd55e7a974f673504915be1c6618be8087d29a4b6" Dec 04 
16:10:36 crc kubenswrapper[4946]: I1204 16:10:36.862824 4946 generic.go:334] "Generic (PLEG): container finished" podID="582095c0-2850-4d2e-869e-bb61c982f955" containerID="e2ca6b4b6330db67bb69a3669847f668b5afcc742ea45699e888acbcdd17cbf9" exitCode=0 Dec 04 16:10:36 crc kubenswrapper[4946]: I1204 16:10:36.862883 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8qm9f/must-gather-hqmjm" event={"ID":"582095c0-2850-4d2e-869e-bb61c982f955","Type":"ContainerDied","Data":"e2ca6b4b6330db67bb69a3669847f668b5afcc742ea45699e888acbcdd17cbf9"} Dec 04 16:10:36 crc kubenswrapper[4946]: I1204 16:10:36.864647 4946 scope.go:117] "RemoveContainer" containerID="e2ca6b4b6330db67bb69a3669847f668b5afcc742ea45699e888acbcdd17cbf9" Dec 04 16:10:37 crc kubenswrapper[4946]: I1204 16:10:37.856568 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-8qm9f_must-gather-hqmjm_582095c0-2850-4d2e-869e-bb61c982f955/gather/0.log" Dec 04 16:10:46 crc kubenswrapper[4946]: I1204 16:10:46.527009 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-8qm9f/must-gather-hqmjm"] Dec 04 16:10:46 crc kubenswrapper[4946]: I1204 16:10:46.528059 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-8qm9f/must-gather-hqmjm" podUID="582095c0-2850-4d2e-869e-bb61c982f955" containerName="copy" containerID="cri-o://306a76101335d5c82988ce9b793966d8beaf0c2bd1607be17304e637f7dc2984" gracePeriod=2 Dec 04 16:10:46 crc kubenswrapper[4946]: I1204 16:10:46.538867 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-8qm9f/must-gather-hqmjm"] Dec 04 16:10:47 crc kubenswrapper[4946]: I1204 16:10:47.021921 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-8qm9f_must-gather-hqmjm_582095c0-2850-4d2e-869e-bb61c982f955/copy/0.log" Dec 04 16:10:47 crc kubenswrapper[4946]: I1204 16:10:47.024367 4946 generic.go:334] "Generic (PLEG): container finished" podID="582095c0-2850-4d2e-869e-bb61c982f955" containerID="306a76101335d5c82988ce9b793966d8beaf0c2bd1607be17304e637f7dc2984" exitCode=143 Dec 04 16:10:47 crc kubenswrapper[4946]: I1204 16:10:47.341983 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-8qm9f_must-gather-hqmjm_582095c0-2850-4d2e-869e-bb61c982f955/copy/0.log" Dec 04 16:10:47 crc kubenswrapper[4946]: I1204 16:10:47.342907 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8qm9f/must-gather-hqmjm" Dec 04 16:10:47 crc kubenswrapper[4946]: I1204 16:10:47.480612 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/582095c0-2850-4d2e-869e-bb61c982f955-must-gather-output\") pod \"582095c0-2850-4d2e-869e-bb61c982f955\" (UID: \"582095c0-2850-4d2e-869e-bb61c982f955\") " Dec 04 16:10:47 crc kubenswrapper[4946]: I1204 16:10:47.480728 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptdmd\" (UniqueName: \"kubernetes.io/projected/582095c0-2850-4d2e-869e-bb61c982f955-kube-api-access-ptdmd\") pod \"582095c0-2850-4d2e-869e-bb61c982f955\" (UID: \"582095c0-2850-4d2e-869e-bb61c982f955\") " Dec 04 16:10:47 crc kubenswrapper[4946]: I1204 16:10:47.523514 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/582095c0-2850-4d2e-869e-bb61c982f955-kube-api-access-ptdmd" (OuterVolumeSpecName: "kube-api-access-ptdmd") pod "582095c0-2850-4d2e-869e-bb61c982f955" (UID: "582095c0-2850-4d2e-869e-bb61c982f955"). InnerVolumeSpecName "kube-api-access-ptdmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 16:10:47 crc kubenswrapper[4946]: I1204 16:10:47.596938 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptdmd\" (UniqueName: \"kubernetes.io/projected/582095c0-2850-4d2e-869e-bb61c982f955-kube-api-access-ptdmd\") on node \"crc\" DevicePath \"\"" Dec 04 16:10:47 crc kubenswrapper[4946]: I1204 16:10:47.743674 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/582095c0-2850-4d2e-869e-bb61c982f955-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "582095c0-2850-4d2e-869e-bb61c982f955" (UID: "582095c0-2850-4d2e-869e-bb61c982f955"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:10:47 crc kubenswrapper[4946]: I1204 16:10:47.802477 4946 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/582095c0-2850-4d2e-869e-bb61c982f955-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 04 16:10:48 crc kubenswrapper[4946]: I1204 16:10:48.037025 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-8qm9f_must-gather-hqmjm_582095c0-2850-4d2e-869e-bb61c982f955/copy/0.log" Dec 04 16:10:48 crc kubenswrapper[4946]: I1204 16:10:48.037857 4946 scope.go:117] "RemoveContainer" containerID="306a76101335d5c82988ce9b793966d8beaf0c2bd1607be17304e637f7dc2984" Dec 04 16:10:48 crc kubenswrapper[4946]: I1204 16:10:48.037937 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8qm9f/must-gather-hqmjm" Dec 04 16:10:48 crc kubenswrapper[4946]: I1204 16:10:48.069180 4946 scope.go:117] "RemoveContainer" containerID="e2ca6b4b6330db67bb69a3669847f668b5afcc742ea45699e888acbcdd17cbf9" Dec 04 16:10:49 crc kubenswrapper[4946]: I1204 16:10:49.469177 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="582095c0-2850-4d2e-869e-bb61c982f955" path="/var/lib/kubelet/pods/582095c0-2850-4d2e-869e-bb61c982f955/volumes" Dec 04 16:10:52 crc kubenswrapper[4946]: I1204 16:10:52.991778 4946 scope.go:117] "RemoveContainer" containerID="b7f34d1fa2c8fbf441461e9d43e03142e71bf37399f128e197b1b4cea80c0960" Dec 04 16:11:52 crc kubenswrapper[4946]: I1204 16:11:52.478640 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 16:11:52 crc kubenswrapper[4946]: I1204 16:11:52.479440 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.538160 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-t7b87"] Dec 04 16:12:15 crc kubenswrapper[4946]: E1204 16:12:15.539818 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e3f6e8a-1410-458a-82c2-8cb4e17bdb02" containerName="extract-utilities" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.539859 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e3f6e8a-1410-458a-82c2-8cb4e17bdb02" containerName="extract-utilities" Dec 04 16:12:15 crc kubenswrapper[4946]: E1204 16:12:15.539879 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bbd6173-4af0-46f7-a811-623876d6ed7f" containerName="extract-content" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.539888 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bbd6173-4af0-46f7-a811-623876d6ed7f" containerName="extract-content" Dec 04 16:12:15 crc kubenswrapper[4946]: E1204 16:12:15.539905 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e3f6e8a-1410-458a-82c2-8cb4e17bdb02" containerName="extract-content" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.539916 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e3f6e8a-1410-458a-82c2-8cb4e17bdb02" containerName="extract-content" Dec 04 16:12:15 crc kubenswrapper[4946]: E1204 16:12:15.539947 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="582095c0-2850-4d2e-869e-bb61c982f955" containerName="gather" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.539956 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="582095c0-2850-4d2e-869e-bb61c982f955" containerName="gather" Dec 04 16:12:15 crc kubenswrapper[4946]: E1204 16:12:15.539970 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e3f6e8a-1410-458a-82c2-8cb4e17bdb02" containerName="registry-server" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.539978 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e3f6e8a-1410-458a-82c2-8cb4e17bdb02" 
containerName="registry-server" Dec 04 16:12:15 crc kubenswrapper[4946]: E1204 16:12:15.540059 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bbd6173-4af0-46f7-a811-623876d6ed7f" containerName="registry-server" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.540068 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bbd6173-4af0-46f7-a811-623876d6ed7f" containerName="registry-server" Dec 04 16:12:15 crc kubenswrapper[4946]: E1204 16:12:15.540098 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bbd6173-4af0-46f7-a811-623876d6ed7f" containerName="extract-utilities" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.540108 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bbd6173-4af0-46f7-a811-623876d6ed7f" containerName="extract-utilities" Dec 04 16:12:15 crc kubenswrapper[4946]: E1204 16:12:15.540156 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="582095c0-2850-4d2e-869e-bb61c982f955" containerName="copy" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.540164 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="582095c0-2850-4d2e-869e-bb61c982f955" containerName="copy" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.540553 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e3f6e8a-1410-458a-82c2-8cb4e17bdb02" containerName="registry-server" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.540583 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bbd6173-4af0-46f7-a811-623876d6ed7f" containerName="registry-server" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.540629 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="582095c0-2850-4d2e-869e-bb61c982f955" containerName="gather" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.540648 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="582095c0-2850-4d2e-869e-bb61c982f955" containerName="copy" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.543926 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-t7b87" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.568619 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-t7b87"] Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.679128 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d29m9\" (UniqueName: \"kubernetes.io/projected/f9953c2c-353a-4103-9061-795f29b8c9a9-kube-api-access-d29m9\") pod \"community-operators-t7b87\" (UID: \"f9953c2c-353a-4103-9061-795f29b8c9a9\") " pod="openshift-marketplace/community-operators-t7b87" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.679873 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9953c2c-353a-4103-9061-795f29b8c9a9-utilities\") pod \"community-operators-t7b87\" (UID: \"f9953c2c-353a-4103-9061-795f29b8c9a9\") " pod="openshift-marketplace/community-operators-t7b87" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.679901 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9953c2c-353a-4103-9061-795f29b8c9a9-catalog-content\") pod \"community-operators-t7b87\" (UID: \"f9953c2c-353a-4103-9061-795f29b8c9a9\") " pod="openshift-marketplace/community-operators-t7b87" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.782296 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d29m9\" (UniqueName: \"kubernetes.io/projected/f9953c2c-353a-4103-9061-795f29b8c9a9-kube-api-access-d29m9\") pod \"community-operators-t7b87\" (UID: \"f9953c2c-353a-4103-9061-795f29b8c9a9\") " pod="openshift-marketplace/community-operators-t7b87" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.782490 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9953c2c-353a-4103-9061-795f29b8c9a9-catalog-content\") pod \"community-operators-t7b87\" (UID: \"f9953c2c-353a-4103-9061-795f29b8c9a9\") " pod="openshift-marketplace/community-operators-t7b87" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.782521 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9953c2c-353a-4103-9061-795f29b8c9a9-utilities\") pod \"community-operators-t7b87\" (UID: \"f9953c2c-353a-4103-9061-795f29b8c9a9\") " pod="openshift-marketplace/community-operators-t7b87" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.783017 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9953c2c-353a-4103-9061-795f29b8c9a9-catalog-content\") pod \"community-operators-t7b87\" (UID: \"f9953c2c-353a-4103-9061-795f29b8c9a9\") " pod="openshift-marketplace/community-operators-t7b87" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.783033 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9953c2c-353a-4103-9061-795f29b8c9a9-utilities\") pod \"community-operators-t7b87\" (UID: \"f9953c2c-353a-4103-9061-795f29b8c9a9\") " pod="openshift-marketplace/community-operators-t7b87" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.809466 4946 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-d29m9\" (UniqueName: \"kubernetes.io/projected/f9953c2c-353a-4103-9061-795f29b8c9a9-kube-api-access-d29m9\") pod \"community-operators-t7b87\" (UID: \"f9953c2c-353a-4103-9061-795f29b8c9a9\") " pod="openshift-marketplace/community-operators-t7b87" Dec 04 16:12:15 crc kubenswrapper[4946]: I1204 16:12:15.875728 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-t7b87" Dec 04 16:12:16 crc kubenswrapper[4946]: I1204 16:12:16.455014 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-t7b87"] Dec 04 16:12:17 crc kubenswrapper[4946]: I1204 16:12:17.190892 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t7b87" event={"ID":"f9953c2c-353a-4103-9061-795f29b8c9a9","Type":"ContainerStarted","Data":"35fc73a41951e0b5a8a0bfaf01d02d97ab9cc86cad1b7ca42263c01459fe7551"} Dec 04 16:12:18 crc kubenswrapper[4946]: I1204 16:12:18.205831 4946 generic.go:334] "Generic (PLEG): container finished" podID="f9953c2c-353a-4103-9061-795f29b8c9a9" containerID="7ad4b30910b5a3d7eb95888b9fd9bb37b4f06eff3884ac3810e029c894e0c5c7" exitCode=0 Dec 04 16:12:18 crc kubenswrapper[4946]: I1204 16:12:18.205910 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t7b87" event={"ID":"f9953c2c-353a-4103-9061-795f29b8c9a9","Type":"ContainerDied","Data":"7ad4b30910b5a3d7eb95888b9fd9bb37b4f06eff3884ac3810e029c894e0c5c7"} Dec 04 16:12:19 crc kubenswrapper[4946]: I1204 16:12:19.220308 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t7b87" event={"ID":"f9953c2c-353a-4103-9061-795f29b8c9a9","Type":"ContainerStarted","Data":"7e689c13733a16a12ac7b9edfd2756c22e96b8b6a0a8829e084ec5c751bf56ab"} Dec 04 16:12:20 crc kubenswrapper[4946]: I1204 16:12:20.237595 4946 generic.go:334] "Generic (PLEG): container finished" podID="f9953c2c-353a-4103-9061-795f29b8c9a9" containerID="7e689c13733a16a12ac7b9edfd2756c22e96b8b6a0a8829e084ec5c751bf56ab" exitCode=0 Dec 04 16:12:20 crc kubenswrapper[4946]: I1204 16:12:20.237874 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t7b87" event={"ID":"f9953c2c-353a-4103-9061-795f29b8c9a9","Type":"ContainerDied","Data":"7e689c13733a16a12ac7b9edfd2756c22e96b8b6a0a8829e084ec5c751bf56ab"} Dec 04 16:12:22 crc kubenswrapper[4946]: I1204 16:12:22.264454 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t7b87" event={"ID":"f9953c2c-353a-4103-9061-795f29b8c9a9","Type":"ContainerStarted","Data":"2519e2f0b721d491b8efadce090a681296cf3984c3caf95cafee45594f0ad962"} Dec 04 16:12:22 crc kubenswrapper[4946]: I1204 16:12:22.288865 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-t7b87" podStartSLOduration=4.443978822 podStartE2EDuration="7.288843057s" podCreationTimestamp="2025-12-04 16:12:15 +0000 UTC" firstStartedPulling="2025-12-04 16:12:18.208107801 +0000 UTC m=+4189.094151442" lastFinishedPulling="2025-12-04 16:12:21.052972036 +0000 UTC m=+4191.939015677" observedRunningTime="2025-12-04 16:12:22.285988371 +0000 UTC m=+4193.172032012" watchObservedRunningTime="2025-12-04 16:12:22.288843057 +0000 UTC m=+4193.174886698" Dec 04 16:12:22 crc kubenswrapper[4946]: I1204 16:12:22.479997 4946 patch_prober.go:28] interesting 
pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 16:12:22 crc kubenswrapper[4946]: I1204 16:12:22.480061 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 16:12:25 crc kubenswrapper[4946]: I1204 16:12:25.876088 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-t7b87" Dec 04 16:12:25 crc kubenswrapper[4946]: I1204 16:12:25.877217 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-t7b87" Dec 04 16:12:25 crc kubenswrapper[4946]: I1204 16:12:25.970222 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-t7b87" Dec 04 16:12:26 crc kubenswrapper[4946]: I1204 16:12:26.383042 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-t7b87" Dec 04 16:12:26 crc kubenswrapper[4946]: I1204 16:12:26.483648 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-t7b87"] Dec 04 16:12:28 crc kubenswrapper[4946]: I1204 16:12:28.341872 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-t7b87" podUID="f9953c2c-353a-4103-9061-795f29b8c9a9" containerName="registry-server" containerID="cri-o://2519e2f0b721d491b8efadce090a681296cf3984c3caf95cafee45594f0ad962" gracePeriod=2 Dec 04 16:12:29 crc kubenswrapper[4946]: I1204 16:12:29.356525 4946 generic.go:334] "Generic (PLEG): container finished" podID="f9953c2c-353a-4103-9061-795f29b8c9a9" containerID="2519e2f0b721d491b8efadce090a681296cf3984c3caf95cafee45594f0ad962" exitCode=0 Dec 04 16:12:29 crc kubenswrapper[4946]: I1204 16:12:29.356585 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t7b87" event={"ID":"f9953c2c-353a-4103-9061-795f29b8c9a9","Type":"ContainerDied","Data":"2519e2f0b721d491b8efadce090a681296cf3984c3caf95cafee45594f0ad962"} Dec 04 16:12:29 crc kubenswrapper[4946]: E1204 16:12:29.468455 4946 info.go:109] Failed to get network devices: open /sys/class/net/35fc73a41951e0b/address: no such file or directory Dec 04 16:12:29 crc kubenswrapper[4946]: I1204 16:12:29.840628 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-t7b87" Dec 04 16:12:29 crc kubenswrapper[4946]: I1204 16:12:29.977635 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9953c2c-353a-4103-9061-795f29b8c9a9-utilities\") pod \"f9953c2c-353a-4103-9061-795f29b8c9a9\" (UID: \"f9953c2c-353a-4103-9061-795f29b8c9a9\") " Dec 04 16:12:29 crc kubenswrapper[4946]: I1204 16:12:29.977897 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d29m9\" (UniqueName: \"kubernetes.io/projected/f9953c2c-353a-4103-9061-795f29b8c9a9-kube-api-access-d29m9\") pod \"f9953c2c-353a-4103-9061-795f29b8c9a9\" (UID: \"f9953c2c-353a-4103-9061-795f29b8c9a9\") " Dec 04 16:12:29 crc kubenswrapper[4946]: I1204 16:12:29.977933 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9953c2c-353a-4103-9061-795f29b8c9a9-catalog-content\") pod \"f9953c2c-353a-4103-9061-795f29b8c9a9\" (UID: \"f9953c2c-353a-4103-9061-795f29b8c9a9\") " Dec 04 16:12:29 crc kubenswrapper[4946]: I1204 16:12:29.978918 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9953c2c-353a-4103-9061-795f29b8c9a9-utilities" (OuterVolumeSpecName: "utilities") pod "f9953c2c-353a-4103-9061-795f29b8c9a9" (UID: "f9953c2c-353a-4103-9061-795f29b8c9a9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:12:29 crc kubenswrapper[4946]: I1204 16:12:29.987419 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9953c2c-353a-4103-9061-795f29b8c9a9-kube-api-access-d29m9" (OuterVolumeSpecName: "kube-api-access-d29m9") pod "f9953c2c-353a-4103-9061-795f29b8c9a9" (UID: "f9953c2c-353a-4103-9061-795f29b8c9a9"). InnerVolumeSpecName "kube-api-access-d29m9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 16:12:30 crc kubenswrapper[4946]: I1204 16:12:30.054724 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9953c2c-353a-4103-9061-795f29b8c9a9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f9953c2c-353a-4103-9061-795f29b8c9a9" (UID: "f9953c2c-353a-4103-9061-795f29b8c9a9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:12:30 crc kubenswrapper[4946]: I1204 16:12:30.081003 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d29m9\" (UniqueName: \"kubernetes.io/projected/f9953c2c-353a-4103-9061-795f29b8c9a9-kube-api-access-d29m9\") on node \"crc\" DevicePath \"\"" Dec 04 16:12:30 crc kubenswrapper[4946]: I1204 16:12:30.081390 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9953c2c-353a-4103-9061-795f29b8c9a9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 16:12:30 crc kubenswrapper[4946]: I1204 16:12:30.081404 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9953c2c-353a-4103-9061-795f29b8c9a9-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 16:12:30 crc kubenswrapper[4946]: I1204 16:12:30.370905 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t7b87" event={"ID":"f9953c2c-353a-4103-9061-795f29b8c9a9","Type":"ContainerDied","Data":"35fc73a41951e0b5a8a0bfaf01d02d97ab9cc86cad1b7ca42263c01459fe7551"} Dec 04 16:12:30 crc kubenswrapper[4946]: I1204 16:12:30.370983 4946 scope.go:117] "RemoveContainer" containerID="2519e2f0b721d491b8efadce090a681296cf3984c3caf95cafee45594f0ad962" Dec 04 16:12:30 crc kubenswrapper[4946]: I1204 16:12:30.371006 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-t7b87" Dec 04 16:12:30 crc kubenswrapper[4946]: I1204 16:12:30.398447 4946 scope.go:117] "RemoveContainer" containerID="7e689c13733a16a12ac7b9edfd2756c22e96b8b6a0a8829e084ec5c751bf56ab" Dec 04 16:12:30 crc kubenswrapper[4946]: I1204 16:12:30.416980 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-t7b87"] Dec 04 16:12:30 crc kubenswrapper[4946]: I1204 16:12:30.426974 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-t7b87"] Dec 04 16:12:30 crc kubenswrapper[4946]: I1204 16:12:30.446281 4946 scope.go:117] "RemoveContainer" containerID="7ad4b30910b5a3d7eb95888b9fd9bb37b4f06eff3884ac3810e029c894e0c5c7" Dec 04 16:12:31 crc kubenswrapper[4946]: I1204 16:12:31.464971 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9953c2c-353a-4103-9061-795f29b8c9a9" path="/var/lib/kubelet/pods/f9953c2c-353a-4103-9061-795f29b8c9a9/volumes" Dec 04 16:12:52 crc kubenswrapper[4946]: I1204 16:12:52.479536 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 16:12:52 crc kubenswrapper[4946]: I1204 16:12:52.480860 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 16:12:52 crc kubenswrapper[4946]: I1204 16:12:52.480958 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 16:12:52 crc kubenswrapper[4946]: I1204 16:12:52.482806 4946 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"43c4931322976c77430de96205a2c957b71c7e81d18e6e2e9fc9b080b7e4614f"} pod="openshift-machine-config-operator/machine-config-daemon-qhv79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 16:12:52 crc kubenswrapper[4946]: I1204 16:12:52.482882 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" containerID="cri-o://43c4931322976c77430de96205a2c957b71c7e81d18e6e2e9fc9b080b7e4614f" gracePeriod=600 Dec 04 16:12:52 crc kubenswrapper[4946]: I1204 16:12:52.637111 4946 generic.go:334] "Generic (PLEG): container finished" podID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerID="43c4931322976c77430de96205a2c957b71c7e81d18e6e2e9fc9b080b7e4614f" exitCode=0 Dec 04 16:12:52 crc kubenswrapper[4946]: I1204 16:12:52.637216 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerDied","Data":"43c4931322976c77430de96205a2c957b71c7e81d18e6e2e9fc9b080b7e4614f"} Dec 04 16:12:52 crc kubenswrapper[4946]: I1204 16:12:52.637263 4946 scope.go:117] "RemoveContainer" containerID="4958482caec78955497e99046a9c98b4b92f5ca8b964644b6caccefa9cc520d0" Dec 04 16:12:53 crc kubenswrapper[4946]: I1204 16:12:53.650805 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3"} Dec 04 16:13:54 crc kubenswrapper[4946]: I1204 16:13:54.788281 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-tbv2b/must-gather-zn5k2"] Dec 04 16:13:54 crc kubenswrapper[4946]: E1204 16:13:54.789741 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9953c2c-353a-4103-9061-795f29b8c9a9" containerName="registry-server" Dec 04 16:13:54 crc kubenswrapper[4946]: I1204 16:13:54.789760 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9953c2c-353a-4103-9061-795f29b8c9a9" containerName="registry-server" Dec 04 16:13:54 crc kubenswrapper[4946]: E1204 16:13:54.789797 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9953c2c-353a-4103-9061-795f29b8c9a9" containerName="extract-utilities" Dec 04 16:13:54 crc kubenswrapper[4946]: I1204 16:13:54.789807 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9953c2c-353a-4103-9061-795f29b8c9a9" containerName="extract-utilities" Dec 04 16:13:54 crc kubenswrapper[4946]: E1204 16:13:54.789835 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9953c2c-353a-4103-9061-795f29b8c9a9" containerName="extract-content" Dec 04 16:13:54 crc kubenswrapper[4946]: I1204 16:13:54.789844 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9953c2c-353a-4103-9061-795f29b8c9a9" containerName="extract-content" Dec 04 16:13:54 crc kubenswrapper[4946]: I1204 16:13:54.790158 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9953c2c-353a-4103-9061-795f29b8c9a9" containerName="registry-server" Dec 04 16:13:54 crc kubenswrapper[4946]: I1204 16:13:54.791925 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tbv2b/must-gather-zn5k2" Dec 04 16:13:54 crc kubenswrapper[4946]: I1204 16:13:54.795838 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-tbv2b"/"openshift-service-ca.crt" Dec 04 16:13:54 crc kubenswrapper[4946]: I1204 16:13:54.796173 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-tbv2b"/"kube-root-ca.crt" Dec 04 16:13:54 crc kubenswrapper[4946]: I1204 16:13:54.831300 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-tbv2b/must-gather-zn5k2"] Dec 04 16:13:54 crc kubenswrapper[4946]: I1204 16:13:54.917881 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/40d14c1e-d816-4bcb-a55f-0be1ddb906f2-must-gather-output\") pod \"must-gather-zn5k2\" (UID: \"40d14c1e-d816-4bcb-a55f-0be1ddb906f2\") " pod="openshift-must-gather-tbv2b/must-gather-zn5k2" Dec 04 16:13:54 crc kubenswrapper[4946]: I1204 16:13:54.918092 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b94q\" (UniqueName: \"kubernetes.io/projected/40d14c1e-d816-4bcb-a55f-0be1ddb906f2-kube-api-access-2b94q\") pod \"must-gather-zn5k2\" (UID: \"40d14c1e-d816-4bcb-a55f-0be1ddb906f2\") " pod="openshift-must-gather-tbv2b/must-gather-zn5k2" Dec 04 16:13:55 crc kubenswrapper[4946]: I1204 16:13:55.021820 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/40d14c1e-d816-4bcb-a55f-0be1ddb906f2-must-gather-output\") pod \"must-gather-zn5k2\" (UID: \"40d14c1e-d816-4bcb-a55f-0be1ddb906f2\") " pod="openshift-must-gather-tbv2b/must-gather-zn5k2" Dec 04 16:13:55 crc kubenswrapper[4946]: I1204 16:13:55.021949 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b94q\" (UniqueName: \"kubernetes.io/projected/40d14c1e-d816-4bcb-a55f-0be1ddb906f2-kube-api-access-2b94q\") pod \"must-gather-zn5k2\" (UID: \"40d14c1e-d816-4bcb-a55f-0be1ddb906f2\") " pod="openshift-must-gather-tbv2b/must-gather-zn5k2" Dec 04 16:13:55 crc kubenswrapper[4946]: I1204 16:13:55.022568 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/40d14c1e-d816-4bcb-a55f-0be1ddb906f2-must-gather-output\") pod \"must-gather-zn5k2\" (UID: \"40d14c1e-d816-4bcb-a55f-0be1ddb906f2\") " pod="openshift-must-gather-tbv2b/must-gather-zn5k2" Dec 04 16:13:55 crc kubenswrapper[4946]: I1204 16:13:55.045777 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b94q\" (UniqueName: \"kubernetes.io/projected/40d14c1e-d816-4bcb-a55f-0be1ddb906f2-kube-api-access-2b94q\") pod \"must-gather-zn5k2\" (UID: \"40d14c1e-d816-4bcb-a55f-0be1ddb906f2\") " pod="openshift-must-gather-tbv2b/must-gather-zn5k2" Dec 04 16:13:55 crc kubenswrapper[4946]: I1204 16:13:55.138725 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tbv2b/must-gather-zn5k2" Dec 04 16:13:55 crc kubenswrapper[4946]: I1204 16:13:55.661180 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-tbv2b/must-gather-zn5k2"] Dec 04 16:13:56 crc kubenswrapper[4946]: I1204 16:13:56.484623 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tbv2b/must-gather-zn5k2" event={"ID":"40d14c1e-d816-4bcb-a55f-0be1ddb906f2","Type":"ContainerStarted","Data":"0b55d21fcc1ef16f81b60bb70e3a722dce3823eb61e29af6b6f2a31429fa167b"} Dec 04 16:13:56 crc kubenswrapper[4946]: I1204 16:13:56.485357 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tbv2b/must-gather-zn5k2" event={"ID":"40d14c1e-d816-4bcb-a55f-0be1ddb906f2","Type":"ContainerStarted","Data":"6e95e1a96f7b3326777e4039ac2ededd4eba7438fb89e0fa2988abf5694584ed"} Dec 04 16:13:56 crc kubenswrapper[4946]: I1204 16:13:56.485372 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tbv2b/must-gather-zn5k2" event={"ID":"40d14c1e-d816-4bcb-a55f-0be1ddb906f2","Type":"ContainerStarted","Data":"d054020738442d065dd79b3bf9020c1af4eed70c3a1c54114eac180e99ddb016"} Dec 04 16:13:56 crc kubenswrapper[4946]: I1204 16:13:56.520873 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-tbv2b/must-gather-zn5k2" podStartSLOduration=2.520847911 podStartE2EDuration="2.520847911s" podCreationTimestamp="2025-12-04 16:13:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 16:13:56.516523096 +0000 UTC m=+4287.402566737" watchObservedRunningTime="2025-12-04 16:13:56.520847911 +0000 UTC m=+4287.406891542" Dec 04 16:14:01 crc kubenswrapper[4946]: I1204 16:14:01.606499 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-tbv2b/crc-debug-fg994"] Dec 04 16:14:01 crc kubenswrapper[4946]: I1204 16:14:01.609936 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tbv2b/crc-debug-fg994" Dec 04 16:14:01 crc kubenswrapper[4946]: I1204 16:14:01.612815 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-tbv2b"/"default-dockercfg-fnccm" Dec 04 16:14:01 crc kubenswrapper[4946]: I1204 16:14:01.699783 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x74w\" (UniqueName: \"kubernetes.io/projected/946ac6b1-ef96-4b90-83d6-968318244744-kube-api-access-8x74w\") pod \"crc-debug-fg994\" (UID: \"946ac6b1-ef96-4b90-83d6-968318244744\") " pod="openshift-must-gather-tbv2b/crc-debug-fg994" Dec 04 16:14:01 crc kubenswrapper[4946]: I1204 16:14:01.699847 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/946ac6b1-ef96-4b90-83d6-968318244744-host\") pod \"crc-debug-fg994\" (UID: \"946ac6b1-ef96-4b90-83d6-968318244744\") " pod="openshift-must-gather-tbv2b/crc-debug-fg994" Dec 04 16:14:01 crc kubenswrapper[4946]: I1204 16:14:01.802226 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x74w\" (UniqueName: \"kubernetes.io/projected/946ac6b1-ef96-4b90-83d6-968318244744-kube-api-access-8x74w\") pod \"crc-debug-fg994\" (UID: \"946ac6b1-ef96-4b90-83d6-968318244744\") " pod="openshift-must-gather-tbv2b/crc-debug-fg994" Dec 04 16:14:01 crc kubenswrapper[4946]: I1204 16:14:01.802282 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/946ac6b1-ef96-4b90-83d6-968318244744-host\") pod \"crc-debug-fg994\" (UID: \"946ac6b1-ef96-4b90-83d6-968318244744\") " pod="openshift-must-gather-tbv2b/crc-debug-fg994" Dec 04 16:14:01 crc kubenswrapper[4946]: I1204 16:14:01.802474 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/946ac6b1-ef96-4b90-83d6-968318244744-host\") pod \"crc-debug-fg994\" (UID: \"946ac6b1-ef96-4b90-83d6-968318244744\") " pod="openshift-must-gather-tbv2b/crc-debug-fg994" Dec 04 16:14:01 crc kubenswrapper[4946]: I1204 16:14:01.830738 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x74w\" (UniqueName: \"kubernetes.io/projected/946ac6b1-ef96-4b90-83d6-968318244744-kube-api-access-8x74w\") pod \"crc-debug-fg994\" (UID: \"946ac6b1-ef96-4b90-83d6-968318244744\") " pod="openshift-must-gather-tbv2b/crc-debug-fg994" Dec 04 16:14:01 crc kubenswrapper[4946]: I1204 16:14:01.935393 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tbv2b/crc-debug-fg994" Dec 04 16:14:02 crc kubenswrapper[4946]: I1204 16:14:02.606969 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tbv2b/crc-debug-fg994" event={"ID":"946ac6b1-ef96-4b90-83d6-968318244744","Type":"ContainerStarted","Data":"9a22eb297c8344d6814f798e89dfa5a6e6157b5e77e80c321fef37714283ab06"} Dec 04 16:14:02 crc kubenswrapper[4946]: I1204 16:14:02.607813 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tbv2b/crc-debug-fg994" event={"ID":"946ac6b1-ef96-4b90-83d6-968318244744","Type":"ContainerStarted","Data":"2eb7fbca382f41775b38a0b153e6898a7b03f3b97591244cdb8556fc09b465ac"} Dec 04 16:14:02 crc kubenswrapper[4946]: I1204 16:14:02.639236 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-tbv2b/crc-debug-fg994" podStartSLOduration=1.6392000420000001 podStartE2EDuration="1.639200042s" podCreationTimestamp="2025-12-04 16:14:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 16:14:02.622732173 +0000 UTC m=+4293.508775824" watchObservedRunningTime="2025-12-04 16:14:02.639200042 +0000 UTC m=+4293.525243693" Dec 04 16:14:52 crc kubenswrapper[4946]: I1204 16:14:52.480894 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 16:14:52 crc kubenswrapper[4946]: I1204 16:14:52.481903 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 16:14:57 crc kubenswrapper[4946]: I1204 16:14:57.225504 4946 generic.go:334] "Generic (PLEG): container finished" podID="946ac6b1-ef96-4b90-83d6-968318244744" containerID="9a22eb297c8344d6814f798e89dfa5a6e6157b5e77e80c321fef37714283ab06" exitCode=0 Dec 04 16:14:57 crc kubenswrapper[4946]: I1204 16:14:57.225605 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tbv2b/crc-debug-fg994" event={"ID":"946ac6b1-ef96-4b90-83d6-968318244744","Type":"ContainerDied","Data":"9a22eb297c8344d6814f798e89dfa5a6e6157b5e77e80c321fef37714283ab06"} Dec 04 16:14:58 crc kubenswrapper[4946]: I1204 16:14:58.893439 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tbv2b/crc-debug-fg994" Dec 04 16:14:58 crc kubenswrapper[4946]: I1204 16:14:58.932396 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-tbv2b/crc-debug-fg994"] Dec 04 16:14:58 crc kubenswrapper[4946]: I1204 16:14:58.943844 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-tbv2b/crc-debug-fg994"] Dec 04 16:14:59 crc kubenswrapper[4946]: I1204 16:14:59.053480 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8x74w\" (UniqueName: \"kubernetes.io/projected/946ac6b1-ef96-4b90-83d6-968318244744-kube-api-access-8x74w\") pod \"946ac6b1-ef96-4b90-83d6-968318244744\" (UID: \"946ac6b1-ef96-4b90-83d6-968318244744\") " Dec 04 16:14:59 crc kubenswrapper[4946]: I1204 16:14:59.054052 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/946ac6b1-ef96-4b90-83d6-968318244744-host\") pod \"946ac6b1-ef96-4b90-83d6-968318244744\" (UID: \"946ac6b1-ef96-4b90-83d6-968318244744\") " Dec 04 16:14:59 crc kubenswrapper[4946]: I1204 16:14:59.054127 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/946ac6b1-ef96-4b90-83d6-968318244744-host" (OuterVolumeSpecName: "host") pod "946ac6b1-ef96-4b90-83d6-968318244744" (UID: "946ac6b1-ef96-4b90-83d6-968318244744"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 16:14:59 crc kubenswrapper[4946]: I1204 16:14:59.054983 4946 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/946ac6b1-ef96-4b90-83d6-968318244744-host\") on node \"crc\" DevicePath \"\"" Dec 04 16:14:59 crc kubenswrapper[4946]: I1204 16:14:59.063560 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/946ac6b1-ef96-4b90-83d6-968318244744-kube-api-access-8x74w" (OuterVolumeSpecName: "kube-api-access-8x74w") pod "946ac6b1-ef96-4b90-83d6-968318244744" (UID: "946ac6b1-ef96-4b90-83d6-968318244744"). InnerVolumeSpecName "kube-api-access-8x74w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 16:14:59 crc kubenswrapper[4946]: I1204 16:14:59.157607 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8x74w\" (UniqueName: \"kubernetes.io/projected/946ac6b1-ef96-4b90-83d6-968318244744-kube-api-access-8x74w\") on node \"crc\" DevicePath \"\"" Dec 04 16:14:59 crc kubenswrapper[4946]: I1204 16:14:59.255520 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2eb7fbca382f41775b38a0b153e6898a7b03f3b97591244cdb8556fc09b465ac" Dec 04 16:14:59 crc kubenswrapper[4946]: I1204 16:14:59.255598 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tbv2b/crc-debug-fg994" Dec 04 16:14:59 crc kubenswrapper[4946]: I1204 16:14:59.479945 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="946ac6b1-ef96-4b90-83d6-968318244744" path="/var/lib/kubelet/pods/946ac6b1-ef96-4b90-83d6-968318244744/volumes" Dec 04 16:14:59 crc kubenswrapper[4946]: E1204 16:14:59.520515 4946 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod946ac6b1_ef96_4b90_83d6_968318244744.slice/crio-2eb7fbca382f41775b38a0b153e6898a7b03f3b97591244cdb8556fc09b465ac\": RecentStats: unable to find data in memory cache]" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.224628 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf"] Dec 04 16:15:00 crc kubenswrapper[4946]: E1204 16:15:00.230995 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="946ac6b1-ef96-4b90-83d6-968318244744" containerName="container-00" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.231148 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="946ac6b1-ef96-4b90-83d6-968318244744" containerName="container-00" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.231537 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="946ac6b1-ef96-4b90-83d6-968318244744" containerName="container-00" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.232784 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.237699 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.237949 4946 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.244915 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf"] Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.287686 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c05575fd-7259-4a99-9100-456f0277bd4d-secret-volume\") pod \"collect-profiles-29414415-n47hf\" (UID: \"c05575fd-7259-4a99-9100-456f0277bd4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.287760 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pv94h\" (UniqueName: \"kubernetes.io/projected/c05575fd-7259-4a99-9100-456f0277bd4d-kube-api-access-pv94h\") pod \"collect-profiles-29414415-n47hf\" (UID: \"c05575fd-7259-4a99-9100-456f0277bd4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.287854 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c05575fd-7259-4a99-9100-456f0277bd4d-config-volume\") pod \"collect-profiles-29414415-n47hf\" (UID: 
\"c05575fd-7259-4a99-9100-456f0277bd4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.390921 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c05575fd-7259-4a99-9100-456f0277bd4d-config-volume\") pod \"collect-profiles-29414415-n47hf\" (UID: \"c05575fd-7259-4a99-9100-456f0277bd4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.391181 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c05575fd-7259-4a99-9100-456f0277bd4d-secret-volume\") pod \"collect-profiles-29414415-n47hf\" (UID: \"c05575fd-7259-4a99-9100-456f0277bd4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.391216 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pv94h\" (UniqueName: \"kubernetes.io/projected/c05575fd-7259-4a99-9100-456f0277bd4d-kube-api-access-pv94h\") pod \"collect-profiles-29414415-n47hf\" (UID: \"c05575fd-7259-4a99-9100-456f0277bd4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.392061 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c05575fd-7259-4a99-9100-456f0277bd4d-config-volume\") pod \"collect-profiles-29414415-n47hf\" (UID: \"c05575fd-7259-4a99-9100-456f0277bd4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.406516 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c05575fd-7259-4a99-9100-456f0277bd4d-secret-volume\") pod \"collect-profiles-29414415-n47hf\" (UID: \"c05575fd-7259-4a99-9100-456f0277bd4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.419066 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pv94h\" (UniqueName: \"kubernetes.io/projected/c05575fd-7259-4a99-9100-456f0277bd4d-kube-api-access-pv94h\") pod \"collect-profiles-29414415-n47hf\" (UID: \"c05575fd-7259-4a99-9100-456f0277bd4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.587905 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.795209 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-tbv2b/crc-debug-2qwk4"] Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.812174 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tbv2b/crc-debug-2qwk4" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.816854 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-tbv2b"/"default-dockercfg-fnccm" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.910682 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46ce5239-cb0d-4caa-80cc-d10b7513b5e0-host\") pod \"crc-debug-2qwk4\" (UID: \"46ce5239-cb0d-4caa-80cc-d10b7513b5e0\") " pod="openshift-must-gather-tbv2b/crc-debug-2qwk4" Dec 04 16:15:00 crc kubenswrapper[4946]: I1204 16:15:00.911139 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9vbd\" (UniqueName: \"kubernetes.io/projected/46ce5239-cb0d-4caa-80cc-d10b7513b5e0-kube-api-access-w9vbd\") pod \"crc-debug-2qwk4\" (UID: \"46ce5239-cb0d-4caa-80cc-d10b7513b5e0\") " pod="openshift-must-gather-tbv2b/crc-debug-2qwk4" Dec 04 16:15:01 crc kubenswrapper[4946]: I1204 16:15:01.015055 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9vbd\" (UniqueName: \"kubernetes.io/projected/46ce5239-cb0d-4caa-80cc-d10b7513b5e0-kube-api-access-w9vbd\") pod \"crc-debug-2qwk4\" (UID: \"46ce5239-cb0d-4caa-80cc-d10b7513b5e0\") " pod="openshift-must-gather-tbv2b/crc-debug-2qwk4" Dec 04 16:15:01 crc kubenswrapper[4946]: I1204 16:15:01.015842 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46ce5239-cb0d-4caa-80cc-d10b7513b5e0-host\") pod \"crc-debug-2qwk4\" (UID: \"46ce5239-cb0d-4caa-80cc-d10b7513b5e0\") " pod="openshift-must-gather-tbv2b/crc-debug-2qwk4" Dec 04 16:15:01 crc kubenswrapper[4946]: I1204 16:15:01.016025 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46ce5239-cb0d-4caa-80cc-d10b7513b5e0-host\") pod \"crc-debug-2qwk4\" (UID: \"46ce5239-cb0d-4caa-80cc-d10b7513b5e0\") " pod="openshift-must-gather-tbv2b/crc-debug-2qwk4" Dec 04 16:15:01 crc kubenswrapper[4946]: I1204 16:15:01.052666 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9vbd\" (UniqueName: \"kubernetes.io/projected/46ce5239-cb0d-4caa-80cc-d10b7513b5e0-kube-api-access-w9vbd\") pod \"crc-debug-2qwk4\" (UID: \"46ce5239-cb0d-4caa-80cc-d10b7513b5e0\") " pod="openshift-must-gather-tbv2b/crc-debug-2qwk4" Dec 04 16:15:01 crc kubenswrapper[4946]: I1204 16:15:01.162500 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tbv2b/crc-debug-2qwk4"
Dec 04 16:15:01 crc kubenswrapper[4946]: I1204 16:15:01.176012 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf"]
Dec 04 16:15:01 crc kubenswrapper[4946]: W1204 16:15:01.199078 4946 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46ce5239_cb0d_4caa_80cc_d10b7513b5e0.slice/crio-13886e7a168959ddd85028537405ef2e4420e3c500c5bd717cc635f82703dd8e WatchSource:0}: Error finding container 13886e7a168959ddd85028537405ef2e4420e3c500c5bd717cc635f82703dd8e: Status 404 returned error can't find the container with id 13886e7a168959ddd85028537405ef2e4420e3c500c5bd717cc635f82703dd8e
Dec 04 16:15:01 crc kubenswrapper[4946]: I1204 16:15:01.304632 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tbv2b/crc-debug-2qwk4" event={"ID":"46ce5239-cb0d-4caa-80cc-d10b7513b5e0","Type":"ContainerStarted","Data":"13886e7a168959ddd85028537405ef2e4420e3c500c5bd717cc635f82703dd8e"}
Dec 04 16:15:01 crc kubenswrapper[4946]: I1204 16:15:01.305718 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf" event={"ID":"c05575fd-7259-4a99-9100-456f0277bd4d","Type":"ContainerStarted","Data":"b58f25c42dd01a19acc70915293ded0d94f291e89caa3aa06202c69acaf9d2e2"}
Dec 04 16:15:02 crc kubenswrapper[4946]: I1204 16:15:02.318265 4946 generic.go:334] "Generic (PLEG): container finished" podID="46ce5239-cb0d-4caa-80cc-d10b7513b5e0" containerID="fb11d1dc211c9a27d9e9fcb88d51b26e81a157a94e489a4a46590848220dd675" exitCode=0
Dec 04 16:15:02 crc kubenswrapper[4946]: I1204 16:15:02.318374 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tbv2b/crc-debug-2qwk4" event={"ID":"46ce5239-cb0d-4caa-80cc-d10b7513b5e0","Type":"ContainerDied","Data":"fb11d1dc211c9a27d9e9fcb88d51b26e81a157a94e489a4a46590848220dd675"}
Dec 04 16:15:02 crc kubenswrapper[4946]: I1204 16:15:02.322570 4946 generic.go:334] "Generic (PLEG): container finished" podID="c05575fd-7259-4a99-9100-456f0277bd4d" containerID="371978c6bdd8b90957622f8d7363820f38dd80913c58095e733fce28cff56646" exitCode=0
Dec 04 16:15:02 crc kubenswrapper[4946]: I1204 16:15:02.322614 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf" event={"ID":"c05575fd-7259-4a99-9100-456f0277bd4d","Type":"ContainerDied","Data":"371978c6bdd8b90957622f8d7363820f38dd80913c58095e733fce28cff56646"}
Dec 04 16:15:03 crc kubenswrapper[4946]: I1204 16:15:03.486070 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tbv2b/crc-debug-2qwk4"
Dec 04 16:15:03 crc kubenswrapper[4946]: I1204 16:15:03.581068 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46ce5239-cb0d-4caa-80cc-d10b7513b5e0-host\") pod \"46ce5239-cb0d-4caa-80cc-d10b7513b5e0\" (UID: \"46ce5239-cb0d-4caa-80cc-d10b7513b5e0\") "
Dec 04 16:15:03 crc kubenswrapper[4946]: I1204 16:15:03.581645 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9vbd\" (UniqueName: \"kubernetes.io/projected/46ce5239-cb0d-4caa-80cc-d10b7513b5e0-kube-api-access-w9vbd\") pod \"46ce5239-cb0d-4caa-80cc-d10b7513b5e0\" (UID: \"46ce5239-cb0d-4caa-80cc-d10b7513b5e0\") "
Dec 04 16:15:03 crc kubenswrapper[4946]: I1204 16:15:03.581402 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46ce5239-cb0d-4caa-80cc-d10b7513b5e0-host" (OuterVolumeSpecName: "host") pod "46ce5239-cb0d-4caa-80cc-d10b7513b5e0" (UID: "46ce5239-cb0d-4caa-80cc-d10b7513b5e0"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 16:15:03 crc kubenswrapper[4946]: I1204 16:15:03.586212 4946 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46ce5239-cb0d-4caa-80cc-d10b7513b5e0-host\") on node \"crc\" DevicePath \"\""
Dec 04 16:15:03 crc kubenswrapper[4946]: I1204 16:15:03.592172 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46ce5239-cb0d-4caa-80cc-d10b7513b5e0-kube-api-access-w9vbd" (OuterVolumeSpecName: "kube-api-access-w9vbd") pod "46ce5239-cb0d-4caa-80cc-d10b7513b5e0" (UID: "46ce5239-cb0d-4caa-80cc-d10b7513b5e0"). InnerVolumeSpecName "kube-api-access-w9vbd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 16:15:03 crc kubenswrapper[4946]: I1204 16:15:03.687969 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-tbv2b/crc-debug-2qwk4"]
Dec 04 16:15:03 crc kubenswrapper[4946]: I1204 16:15:03.695950 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9vbd\" (UniqueName: \"kubernetes.io/projected/46ce5239-cb0d-4caa-80cc-d10b7513b5e0-kube-api-access-w9vbd\") on node \"crc\" DevicePath \"\""
Dec 04 16:15:03 crc kubenswrapper[4946]: I1204 16:15:03.713149 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-tbv2b/crc-debug-2qwk4"]
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.131314 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf"
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.215038 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c05575fd-7259-4a99-9100-456f0277bd4d-secret-volume\") pod \"c05575fd-7259-4a99-9100-456f0277bd4d\" (UID: \"c05575fd-7259-4a99-9100-456f0277bd4d\") "
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.215889 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c05575fd-7259-4a99-9100-456f0277bd4d-config-volume\") pod \"c05575fd-7259-4a99-9100-456f0277bd4d\" (UID: \"c05575fd-7259-4a99-9100-456f0277bd4d\") "
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.216069 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pv94h\" (UniqueName: \"kubernetes.io/projected/c05575fd-7259-4a99-9100-456f0277bd4d-kube-api-access-pv94h\") pod \"c05575fd-7259-4a99-9100-456f0277bd4d\" (UID: \"c05575fd-7259-4a99-9100-456f0277bd4d\") "
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.219951 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c05575fd-7259-4a99-9100-456f0277bd4d-config-volume" (OuterVolumeSpecName: "config-volume") pod "c05575fd-7259-4a99-9100-456f0277bd4d" (UID: "c05575fd-7259-4a99-9100-456f0277bd4d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.234418 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c05575fd-7259-4a99-9100-456f0277bd4d-kube-api-access-pv94h" (OuterVolumeSpecName: "kube-api-access-pv94h") pod "c05575fd-7259-4a99-9100-456f0277bd4d" (UID: "c05575fd-7259-4a99-9100-456f0277bd4d"). InnerVolumeSpecName "kube-api-access-pv94h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.234423 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c05575fd-7259-4a99-9100-456f0277bd4d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c05575fd-7259-4a99-9100-456f0277bd4d" (UID: "c05575fd-7259-4a99-9100-456f0277bd4d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.319873 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pv94h\" (UniqueName: \"kubernetes.io/projected/c05575fd-7259-4a99-9100-456f0277bd4d-kube-api-access-pv94h\") on node \"crc\" DevicePath \"\""
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.319921 4946 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c05575fd-7259-4a99-9100-456f0277bd4d-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.319934 4946 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c05575fd-7259-4a99-9100-456f0277bd4d-config-volume\") on node \"crc\" DevicePath \"\""
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.358908 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="13886e7a168959ddd85028537405ef2e4420e3c500c5bd717cc635f82703dd8e"
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.359027 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tbv2b/crc-debug-2qwk4"
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.368736 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf" event={"ID":"c05575fd-7259-4a99-9100-456f0277bd4d","Type":"ContainerDied","Data":"b58f25c42dd01a19acc70915293ded0d94f291e89caa3aa06202c69acaf9d2e2"}
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.368795 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b58f25c42dd01a19acc70915293ded0d94f291e89caa3aa06202c69acaf9d2e2"
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.368860 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414415-n47hf"
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.888751 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-tbv2b/crc-debug-qbh5v"]
Dec 04 16:15:04 crc kubenswrapper[4946]: E1204 16:15:04.889718 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c05575fd-7259-4a99-9100-456f0277bd4d" containerName="collect-profiles"
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.889733 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="c05575fd-7259-4a99-9100-456f0277bd4d" containerName="collect-profiles"
Dec 04 16:15:04 crc kubenswrapper[4946]: E1204 16:15:04.889763 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46ce5239-cb0d-4caa-80cc-d10b7513b5e0" containerName="container-00"
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.889769 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="46ce5239-cb0d-4caa-80cc-d10b7513b5e0" containerName="container-00"
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.890037 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="c05575fd-7259-4a99-9100-456f0277bd4d" containerName="collect-profiles"
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.890050 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="46ce5239-cb0d-4caa-80cc-d10b7513b5e0" containerName="container-00"
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.891002 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tbv2b/crc-debug-qbh5v"
Dec 04 16:15:04 crc kubenswrapper[4946]: I1204 16:15:04.895080 4946 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-tbv2b"/"default-dockercfg-fnccm"
Dec 04 16:15:05 crc kubenswrapper[4946]: I1204 16:15:05.037629 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2p7h\" (UniqueName: \"kubernetes.io/projected/8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f-kube-api-access-r2p7h\") pod \"crc-debug-qbh5v\" (UID: \"8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f\") " pod="openshift-must-gather-tbv2b/crc-debug-qbh5v"
Dec 04 16:15:05 crc kubenswrapper[4946]: I1204 16:15:05.037945 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f-host\") pod \"crc-debug-qbh5v\" (UID: \"8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f\") " pod="openshift-must-gather-tbv2b/crc-debug-qbh5v"
Dec 04 16:15:05 crc kubenswrapper[4946]: I1204 16:15:05.141023 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2p7h\" (UniqueName: \"kubernetes.io/projected/8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f-kube-api-access-r2p7h\") pod \"crc-debug-qbh5v\" (UID: \"8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f\") " pod="openshift-must-gather-tbv2b/crc-debug-qbh5v"
Dec 04 16:15:05 crc kubenswrapper[4946]: I1204 16:15:05.141428 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f-host\") pod \"crc-debug-qbh5v\" (UID: \"8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f\") " pod="openshift-must-gather-tbv2b/crc-debug-qbh5v"
Dec 04 16:15:05 crc kubenswrapper[4946]: I1204 16:15:05.141590 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f-host\") pod \"crc-debug-qbh5v\" (UID: \"8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f\") " pod="openshift-must-gather-tbv2b/crc-debug-qbh5v"
Dec 04 16:15:05 crc kubenswrapper[4946]: I1204 16:15:05.176930 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2p7h\" (UniqueName: \"kubernetes.io/projected/8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f-kube-api-access-r2p7h\") pod \"crc-debug-qbh5v\" (UID: \"8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f\") " pod="openshift-must-gather-tbv2b/crc-debug-qbh5v"
Dec 04 16:15:05 crc kubenswrapper[4946]: I1204 16:15:05.218719 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tbv2b/crc-debug-qbh5v"
Dec 04 16:15:05 crc kubenswrapper[4946]: I1204 16:15:05.249369 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv"]
Dec 04 16:15:05 crc kubenswrapper[4946]: I1204 16:15:05.260019 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414370-x5tjv"]
Dec 04 16:15:05 crc kubenswrapper[4946]: I1204 16:15:05.383090 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tbv2b/crc-debug-qbh5v" event={"ID":"8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f","Type":"ContainerStarted","Data":"ee8f535176604387413be8484f2c16c43804efa8cdaf9fef3d993cce2e53fcc7"}
Dec 04 16:15:05 crc kubenswrapper[4946]: I1204 16:15:05.466227 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46ce5239-cb0d-4caa-80cc-d10b7513b5e0" path="/var/lib/kubelet/pods/46ce5239-cb0d-4caa-80cc-d10b7513b5e0/volumes"
Dec 04 16:15:05 crc kubenswrapper[4946]: I1204 16:15:05.466967 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a29898d0-c1ba-435d-b43c-337236b65e84" path="/var/lib/kubelet/pods/a29898d0-c1ba-435d-b43c-337236b65e84/volumes"
Dec 04 16:15:06 crc kubenswrapper[4946]: I1204 16:15:06.397282 4946 generic.go:334] "Generic (PLEG): container finished" podID="8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f" containerID="c40dd3a033191df99d668b65bcaab5fd9870f4fc9412b935aa5d1c8849ca3cd3" exitCode=0
Dec 04 16:15:06 crc kubenswrapper[4946]: I1204 16:15:06.397345 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tbv2b/crc-debug-qbh5v" event={"ID":"8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f","Type":"ContainerDied","Data":"c40dd3a033191df99d668b65bcaab5fd9870f4fc9412b935aa5d1c8849ca3cd3"}
Dec 04 16:15:06 crc kubenswrapper[4946]: I1204 16:15:06.472517 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-tbv2b/crc-debug-qbh5v"]
Dec 04 16:15:06 crc kubenswrapper[4946]: I1204 16:15:06.486607 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-tbv2b/crc-debug-qbh5v"]
Dec 04 16:15:07 crc kubenswrapper[4946]: I1204 16:15:07.563727 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tbv2b/crc-debug-qbh5v"
Dec 04 16:15:07 crc kubenswrapper[4946]: I1204 16:15:07.703774 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2p7h\" (UniqueName: \"kubernetes.io/projected/8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f-kube-api-access-r2p7h\") pod \"8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f\" (UID: \"8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f\") "
Dec 04 16:15:07 crc kubenswrapper[4946]: I1204 16:15:07.703962 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f-host\") pod \"8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f\" (UID: \"8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f\") "
Dec 04 16:15:07 crc kubenswrapper[4946]: I1204 16:15:07.704144 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f-host" (OuterVolumeSpecName: "host") pod "8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f" (UID: "8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 16:15:07 crc kubenswrapper[4946]: I1204 16:15:07.704817 4946 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f-host\") on node \"crc\" DevicePath \"\""
Dec 04 16:15:07 crc kubenswrapper[4946]: I1204 16:15:07.720477 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f-kube-api-access-r2p7h" (OuterVolumeSpecName: "kube-api-access-r2p7h") pod "8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f" (UID: "8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f"). InnerVolumeSpecName "kube-api-access-r2p7h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 16:15:07 crc kubenswrapper[4946]: I1204 16:15:07.807017 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2p7h\" (UniqueName: \"kubernetes.io/projected/8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f-kube-api-access-r2p7h\") on node \"crc\" DevicePath \"\""
Dec 04 16:15:08 crc kubenswrapper[4946]: I1204 16:15:08.422010 4946 scope.go:117] "RemoveContainer" containerID="c40dd3a033191df99d668b65bcaab5fd9870f4fc9412b935aa5d1c8849ca3cd3"
Dec 04 16:15:08 crc kubenswrapper[4946]: I1204 16:15:08.422031 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tbv2b/crc-debug-qbh5v"
Dec 04 16:15:09 crc kubenswrapper[4946]: I1204 16:15:09.465557 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f" path="/var/lib/kubelet/pods/8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f/volumes"
Dec 04 16:15:22 crc kubenswrapper[4946]: I1204 16:15:22.479438 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 16:15:22 crc kubenswrapper[4946]: I1204 16:15:22.480320 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 16:15:52 crc kubenswrapper[4946]: I1204 16:15:52.479648 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 16:15:52 crc kubenswrapper[4946]: I1204 16:15:52.480635 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 16:15:52 crc kubenswrapper[4946]: I1204 16:15:52.480753 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-qhv79"
Dec 04 16:15:52 crc kubenswrapper[4946]: I1204 16:15:52.482230 4946 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3"} pod="openshift-machine-config-operator/machine-config-daemon-qhv79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 04 16:15:52 crc kubenswrapper[4946]: I1204 16:15:52.482402 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" containerID="cri-o://044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" gracePeriod=600
Dec 04 16:15:52 crc kubenswrapper[4946]: E1204 16:15:52.619901 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c"
Dec 04 16:15:53 crc kubenswrapper[4946]: I1204 16:15:53.009593 4946 generic.go:334] "Generic (PLEG): container finished" podID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" exitCode=0
Dec 04 16:15:53 crc kubenswrapper[4946]: I1204 16:15:53.009645 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerDied","Data":"044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3"}
Dec 04 16:15:53 crc kubenswrapper[4946]: I1204 16:15:53.009686 4946 scope.go:117] "RemoveContainer" containerID="43c4931322976c77430de96205a2c957b71c7e81d18e6e2e9fc9b080b7e4614f"
Dec 04 16:15:53 crc kubenswrapper[4946]: I1204 16:15:53.010493 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3"
Dec 04 16:15:53 crc kubenswrapper[4946]: E1204 16:15:53.010876 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c"
Dec 04 16:15:53 crc kubenswrapper[4946]: I1204 16:15:53.852974 4946 scope.go:117] "RemoveContainer" containerID="ea80a56a38d5c371501103c9c4cad6015f0b91f338918f56fee498b0a208cad2"
Dec 04 16:15:56 crc kubenswrapper[4946]: I1204 16:15:56.297298 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_a304ef91-9673-43d6-8b91-0ba511961217/init-config-reloader/0.log"
Dec 04 16:15:56 crc kubenswrapper[4946]: I1204 16:15:56.514609 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_a304ef91-9673-43d6-8b91-0ba511961217/init-config-reloader/0.log"
Dec 04 16:15:56 crc kubenswrapper[4946]: I1204 16:15:56.729771 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_a304ef91-9673-43d6-8b91-0ba511961217/config-reloader/0.log"
Dec 04 16:15:56 crc kubenswrapper[4946]: I1204 16:15:56.751586 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_a304ef91-9673-43d6-8b91-0ba511961217/alertmanager/0.log"
Dec 04 16:15:56 crc kubenswrapper[4946]: I1204 16:15:56.921834 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-d6b8cfb46-xzwxx_6a807e28-4c6a-435c-b640-a11ae6770632/barbican-api/0.log"
Dec 04 16:15:56 crc kubenswrapper[4946]: I1204 16:15:56.950782 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-d6b8cfb46-xzwxx_6a807e28-4c6a-435c-b640-a11ae6770632/barbican-api-log/0.log"
Dec 04 16:15:57 crc kubenswrapper[4946]: I1204 16:15:57.023454 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5dbfff5fc8-dg589_71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2/barbican-keystone-listener/0.log"
Dec 04 16:15:57 crc kubenswrapper[4946]: I1204 16:15:57.317791 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5d869cc959-j4wsw_47433338-b9cd-4b5d-beaf-e551ca335c0e/barbican-worker/0.log"
Dec 04 16:15:57 crc kubenswrapper[4946]: I1204 16:15:57.331557 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5dbfff5fc8-dg589_71d3adb1-ab0f-4596-b8ca-96c9a6cdbea2/barbican-keystone-listener-log/0.log"
Dec 04 16:15:57 crc kubenswrapper[4946]: I1204 16:15:57.649000 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5d869cc959-j4wsw_47433338-b9cd-4b5d-beaf-e551ca335c0e/barbican-worker-log/0.log"
Dec 04 16:15:57 crc kubenswrapper[4946]: I1204 16:15:57.859195 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-78g6c_59863a34-23ab-44bb-be9a-dae51f8dd6c1/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 04 16:15:58 crc kubenswrapper[4946]: I1204 16:15:58.575709 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_655a0ab4-533d-4447-8656-72742f94f4a7/ceilometer-notification-agent/0.log"
Dec 04 16:15:58 crc kubenswrapper[4946]: I1204 16:15:58.597957 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_655a0ab4-533d-4447-8656-72742f94f4a7/ceilometer-central-agent/0.log"
Dec 04 16:15:58 crc kubenswrapper[4946]: I1204 16:15:58.668424 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_655a0ab4-533d-4447-8656-72742f94f4a7/proxy-httpd/0.log"
Dec 04 16:15:58 crc kubenswrapper[4946]: I1204 16:15:58.778839 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_655a0ab4-533d-4447-8656-72742f94f4a7/sg-core/0.log"
Dec 04 16:15:58 crc kubenswrapper[4946]: I1204 16:15:58.934273 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0203bca7-1453-4a7b-8597-5286d1d245b2/cinder-api/0.log"
Dec 04 16:15:58 crc kubenswrapper[4946]: I1204 16:15:58.997590 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0203bca7-1453-4a7b-8597-5286d1d245b2/cinder-api-log/0.log"
Dec 04 16:15:59 crc kubenswrapper[4946]: I1204 16:15:59.308345 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_fb133f81-1fe2-4e36-8663-8301e9373627/probe/0.log"
Dec 04 16:15:59 crc kubenswrapper[4946]: I1204 16:15:59.332186 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_fb133f81-1fe2-4e36-8663-8301e9373627/cinder-scheduler/0.log"
Dec 04 16:15:59 crc kubenswrapper[4946]: I1204 16:15:59.572345 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_602d77a3-3d2b-488d-ac47-74d9fd037d6c/cloudkitty-api-log/0.log"
Dec 04 16:15:59 crc kubenswrapper[4946]: I1204 16:15:59.612973 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_602d77a3-3d2b-488d-ac47-74d9fd037d6c/cloudkitty-api/0.log"
Dec 04 16:15:59 crc kubenswrapper[4946]: I1204 16:15:59.671278 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-compactor-0_47583dfd-ecd6-41d8-ac98-748683cd0ae5/loki-compactor/0.log"
Dec 04 16:15:59 crc kubenswrapper[4946]: I1204 16:15:59.787595 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-distributor-664b687b54-76w8c_b0adb62a-e125-4612-8e57-74bab154a2c4/loki-distributor/0.log"
Dec 04 16:15:59 crc kubenswrapper[4946]: I1204 16:15:59.891813 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-bc75944f-c6jcc_c72f3e47-f551-4d7e-8978-cf453bc9a80d/gateway/0.log"
Dec 04 16:15:59 crc kubenswrapper[4946]: I1204 16:15:59.914909 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-bc75944f-jtbm6_2ffa4fa2-c466-47f5-bca6-613ec9e52779/gateway/0.log"
Dec 04 16:16:00 crc kubenswrapper[4946]: I1204 16:16:00.145660 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-index-gateway-0_b21846fc-0f45-4cae-aea6-b4e3f33ec03a/loki-index-gateway/0.log"
Dec 04 16:16:00 crc kubenswrapper[4946]: I1204 16:16:00.431515 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-ingester-0_decd9bb2-7749-48ff-b886-74e49bf5222d/loki-ingester/0.log"
Dec 04 16:16:00 crc kubenswrapper[4946]: I1204 16:16:00.741899 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-query-frontend-7c8cd744d9-4fzfn_55a6559d-165f-4fb0-ac08-a0ba07d02cac/loki-query-frontend/0.log"
Dec 04 16:16:01 crc kubenswrapper[4946]: I1204 16:16:01.131433 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-zgqlm_d2eb924b-02a3-41e8-b820-0a89c1420ebc/configure-network-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 04 16:16:01 crc kubenswrapper[4946]: I1204 16:16:01.491392 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-cqfnb_3c768922-7c81-4021-ab76-fd151946e8fa/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 04 16:16:01 crc kubenswrapper[4946]: I1204 16:16:01.681829 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-querier-5467947bf7-wwslq_02db9740-8e77-440b-95f9-6a2968cd39fe/loki-querier/0.log"
Dec 04 16:16:01 crc kubenswrapper[4946]: I1204 16:16:01.769770 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-vglk4_d984a81d-2489-42fa-b527-8962119b7dc5/init/0.log"
Dec 04 16:16:01 crc kubenswrapper[4946]: I1204 16:16:01.923338 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-vglk4_d984a81d-2489-42fa-b527-8962119b7dc5/init/0.log"
Dec 04 16:16:02 crc kubenswrapper[4946]: I1204 16:16:02.096618 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-vglk4_d984a81d-2489-42fa-b527-8962119b7dc5/dnsmasq-dns/0.log"
Dec 04 16:16:02 crc kubenswrapper[4946]: I1204 16:16:02.109737 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-h6x67_707e8d7d-0e5d-4e4c-ab78-9a4745449b8c/download-cache-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 04 16:16:02 crc kubenswrapper[4946]: I1204 16:16:02.318808 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_57cbb428-8955-4aa2-9025-cfdd74592074/glance-httpd/0.log"
Dec 04 16:16:02 crc kubenswrapper[4946]: I1204 16:16:02.407578 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_57cbb428-8955-4aa2-9025-cfdd74592074/glance-log/0.log"
Dec 04 16:16:02 crc kubenswrapper[4946]: I1204 16:16:02.582515 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_0b2521f9-40c6-4e13-a510-68d5dc34b313/glance-httpd/0.log"
Dec 04 16:16:02 crc kubenswrapper[4946]: I1204 16:16:02.621073 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_0b2521f9-40c6-4e13-a510-68d5dc34b313/glance-log/0.log"
Dec 04 16:16:02 crc kubenswrapper[4946]: I1204 16:16:02.756436 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-bn587_f35809fc-31b6-4c6b-a652-928ed15e187e/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 04 16:16:03 crc kubenswrapper[4946]: I1204 16:16:03.035696 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-proc-0_25621d99-0fe9-42fe-a800-08160c4740aa/cloudkitty-proc/0.log"
Dec 04 16:16:03 crc kubenswrapper[4946]: I1204 16:16:03.113890 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-hw8x7_ab9c79b0-c651-4fdb-aa44-76b66239ef80/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 04 16:16:03 crc kubenswrapper[4946]: I1204 16:16:03.346607 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29414401-bnsr9_ea23c958-243b-479d-a5f3-83e729f96b17/keystone-cron/0.log"
Dec 04 16:16:03 crc kubenswrapper[4946]: I1204 16:16:03.434229 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_7bb3c93e-3400-4b38-bc6d-733a1d345435/kube-state-metrics/0.log"
Dec 04 16:16:03 crc kubenswrapper[4946]: I1204 16:16:03.477026 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-b7bc594d8-sjpg5_279e516e-61bc-4d5b-a3f9-34ecc6c5f47b/keystone-api/0.log"
Dec 04 16:16:03 crc kubenswrapper[4946]: I1204 16:16:03.625543 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-hsj7w_b0812311-5552-4d94-aa72-d7274447e1f6/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 04 16:16:04 crc kubenswrapper[4946]: I1204 16:16:04.253371 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-9c7bc6557-kqv86_5ff5d11b-6f56-4794-97a4-172ef873766c/neutron-httpd/0.log"
Dec 04 16:16:04 crc kubenswrapper[4946]: I1204 16:16:04.292072 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-p46wf_4370c15e-59ff-447e-a825-c687fde1efe0/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 04 16:16:04 crc kubenswrapper[4946]: I1204 16:16:04.359454 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-9c7bc6557-kqv86_5ff5d11b-6f56-4794-97a4-172ef873766c/neutron-api/0.log"
Dec 04 16:16:04 crc kubenswrapper[4946]: I1204 16:16:04.973117 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_580cdc0a-af87-4eac-8b8e-79d451eb312c/nova-api-log/0.log"
Dec 04 16:16:05 crc kubenswrapper[4946]: I1204 16:16:05.254503 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_75446cac-ffe3-4e3a-9bde-e8372b8318c3/nova-cell0-conductor-conductor/0.log"
Dec 04 16:16:05 crc kubenswrapper[4946]: I1204 16:16:05.359837 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_580cdc0a-af87-4eac-8b8e-79d451eb312c/nova-api-api/0.log"
Dec 04 16:16:05 crc kubenswrapper[4946]: I1204 16:16:05.585599 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_4664ae52-b2f1-43d1-a79f-75ccb8fc3a07/nova-cell1-conductor-conductor/0.log"
Dec 04 16:16:05 crc kubenswrapper[4946]: I1204 16:16:05.797343 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_2f2624be-b71d-475e-a895-515905f6ef24/nova-cell1-novncproxy-novncproxy/0.log"
Dec 04 16:16:05 crc kubenswrapper[4946]: I1204 16:16:05.899791 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-xh4n5_16f11a61-301b-45bc-9ef4-675b164d4ace/nova-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 04 16:16:06 crc kubenswrapper[4946]: I1204 16:16:06.046532 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_9c624054-0759-47af-af3e-4600907ab8b8/nova-metadata-log/0.log"
Dec 04 16:16:06 crc kubenswrapper[4946]: I1204 16:16:06.515527 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_29ba8816-fd2e-4a8d-bbcf-d2178110c7eb/nova-scheduler-scheduler/0.log"
Dec 04 16:16:06 crc kubenswrapper[4946]: I1204 16:16:06.651799 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_44a85e36-b029-4450-b8aa-11bf910d8139/mysql-bootstrap/0.log"
Dec 04 16:16:06 crc kubenswrapper[4946]: I1204 16:16:06.834070 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_44a85e36-b029-4450-b8aa-11bf910d8139/mysql-bootstrap/0.log"
Dec 04 16:16:06 crc kubenswrapper[4946]: I1204 16:16:06.946999 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_44a85e36-b029-4450-b8aa-11bf910d8139/galera/0.log"
Dec 04 16:16:07 crc kubenswrapper[4946]: I1204 16:16:07.091004 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_43d26c42-eba9-4e5c-bd2d-7cdf7074a176/mysql-bootstrap/0.log"
Dec 04 16:16:07 crc kubenswrapper[4946]: I1204 16:16:07.392641 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_43d26c42-eba9-4e5c-bd2d-7cdf7074a176/mysql-bootstrap/0.log"
Dec 04 16:16:07 crc kubenswrapper[4946]: I1204 16:16:07.442202 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_43d26c42-eba9-4e5c-bd2d-7cdf7074a176/galera/0.log"
Dec 04 16:16:07 crc kubenswrapper[4946]: I1204 16:16:07.455694 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3"
Dec 04 16:16:07 crc kubenswrapper[4946]: E1204 16:16:07.456014 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c"
Dec 04 16:16:07 crc kubenswrapper[4946]: I1204 16:16:07.845987 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_9c624054-0759-47af-af3e-4600907ab8b8/nova-metadata-metadata/0.log"
Dec 04 16:16:08 crc kubenswrapper[4946]: I1204 16:16:08.016219 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_b5c0e428-98ad-4bda-aba1-685f1b5c8009/openstackclient/0.log"
Dec 04 16:16:08 crc kubenswrapper[4946]: I1204 16:16:08.037608 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-nx7vc_3a94df46-46e0-4178-804b-1582e9cf7738/openstack-network-exporter/0.log"
Dec 04 16:16:08 crc kubenswrapper[4946]: I1204 16:16:08.258602 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hc6tt_9c369924-f384-4ca1-b3ac-e1b334790f15/ovsdb-server-init/0.log"
Dec 04 16:16:08 crc kubenswrapper[4946]: I1204 16:16:08.745842 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hc6tt_9c369924-f384-4ca1-b3ac-e1b334790f15/ovsdb-server/0.log"
Dec 04 16:16:08 crc kubenswrapper[4946]: I1204 16:16:08.746502 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hc6tt_9c369924-f384-4ca1-b3ac-e1b334790f15/ovsdb-server-init/0.log"
Dec 04 16:16:08 crc kubenswrapper[4946]: I1204 16:16:08.871826 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hc6tt_9c369924-f384-4ca1-b3ac-e1b334790f15/ovs-vswitchd/0.log"
Dec 04 16:16:09 crc kubenswrapper[4946]: I1204 16:16:09.181076 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-qv4hw_2734e466-178a-4344-bfac-9adb5e4492a7/ovn-controller/0.log"
Dec 04 16:16:09 crc kubenswrapper[4946]: I1204 16:16:09.229990 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-8b4ms_c761f173-f866-4098-adc7-426857a5004c/ovn-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 04 16:16:09 crc kubenswrapper[4946]: I1204 16:16:09.393581 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7/openstack-network-exporter/0.log"
Dec 04 16:16:09 crc kubenswrapper[4946]: I1204 16:16:09.429740 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_bc7bd9b4-28cb-4cf3-b4d3-7ff1bf2f5db7/ovn-northd/0.log"
Dec 04 16:16:09 crc kubenswrapper[4946]: I1204 16:16:09.594165 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_c6906a68-0819-41bc-a3d8-2ac76e77b67f/openstack-network-exporter/0.log"
Dec 04 16:16:10 crc kubenswrapper[4946]: I1204 16:16:10.537565 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_f89f1623-6a48-4db4-8059-940887046c8e/ovsdbserver-sb/0.log"
Dec 04 16:16:10 crc kubenswrapper[4946]: I1204 16:16:10.563765 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_c6906a68-0819-41bc-a3d8-2ac76e77b67f/ovsdbserver-nb/0.log"
Dec 04 16:16:10 crc kubenswrapper[4946]: I1204 16:16:10.617568 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_f89f1623-6a48-4db4-8059-940887046c8e/openstack-network-exporter/0.log"
Dec 04 16:16:10 crc kubenswrapper[4946]: I1204 16:16:10.942618 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6c965d6d44-d6246_8177c201-11cb-42af-8a3f-85944e6558a3/placement-api/0.log"
Dec 04 16:16:10 crc kubenswrapper[4946]: I1204 16:16:10.996494 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6c965d6d44-d6246_8177c201-11cb-42af-8a3f-85944e6558a3/placement-log/0.log"
Dec 04 16:16:11 crc kubenswrapper[4946]: I1204 16:16:11.146422 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_ce1f1c5e-70ed-463e-88d7-a0a960dd328d/init-config-reloader/0.log"
Dec 04 16:16:11 crc kubenswrapper[4946]: I1204 16:16:11.835336 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_ce1f1c5e-70ed-463e-88d7-a0a960dd328d/thanos-sidecar/0.log"
Dec 04 16:16:11 crc kubenswrapper[4946]: I1204 16:16:11.873296 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_ce1f1c5e-70ed-463e-88d7-a0a960dd328d/init-config-reloader/0.log"
Dec 04 16:16:11 crc kubenswrapper[4946]: I1204 16:16:11.879459 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_ce1f1c5e-70ed-463e-88d7-a0a960dd328d/config-reloader/0.log"
Dec 04 16:16:11 crc kubenswrapper[4946]: I1204 16:16:11.894799 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_ce1f1c5e-70ed-463e-88d7-a0a960dd328d/prometheus/0.log"
Dec 04 16:16:12 crc kubenswrapper[4946]: I1204 16:16:12.130368 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6458626f-136f-475a-b7ad-cf32977e39eb/setup-container/0.log"
Dec 04 16:16:12 crc kubenswrapper[4946]: I1204 16:16:12.407586 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6458626f-136f-475a-b7ad-cf32977e39eb/setup-container/0.log"
Dec 04 16:16:12 crc kubenswrapper[4946]: I1204 16:16:12.568643 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6458626f-136f-475a-b7ad-cf32977e39eb/rabbitmq/0.log"
Dec 04 16:16:12 crc kubenswrapper[4946]: I1204 16:16:12.670229 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_f75f35c6-b58d-471d-9b5e-2d402f3ce92f/setup-container/0.log"
Dec 04 16:16:12 crc kubenswrapper[4946]: I1204 16:16:12.904170 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_f75f35c6-b58d-471d-9b5e-2d402f3ce92f/setup-container/0.log"
Dec 04 16:16:13 crc kubenswrapper[4946]: I1204 16:16:13.051268 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_f75f35c6-b58d-471d-9b5e-2d402f3ce92f/rabbitmq/0.log"
Dec 04 16:16:13 crc kubenswrapper[4946]: I1204 16:16:13.080763 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-cjxpc_c93b77be-2594-456e-a0fc-0a73d3bc6a0b/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 04 16:16:13 crc kubenswrapper[4946]: I1204 16:16:13.344756 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-h9gzd_34194ffb-2211-4d3b-820e-87e8008211a8/redhat-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 04 16:16:13 crc kubenswrapper[4946]: I1204 16:16:13.357268 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-ttd4b_cc2c7406-87e9-4da5-b99c-845bddf4a05b/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 04 16:16:13 crc kubenswrapper[4946]: I1204 16:16:13.681772 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-dztmc_e00fcfe6-35fd-4ed3-9ac1-af6ebda6e207/run-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 04 16:16:13 crc kubenswrapper[4946]: I1204 16:16:13.798396 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-vxxvc_55c4138e-0212-42f5-a45c-52eead1474d3/ssh-known-hosts-edpm-deployment/0.log"
Dec 04 16:16:14 crc kubenswrapper[4946]: I1204 16:16:14.149462 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-8679d7877f-2wbn9_bec308a1-7b44-4153-a863-7b9755407899/proxy-server/0.log"
Dec 04 16:16:14 crc kubenswrapper[4946]: I1204 16:16:14.191725 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-8679d7877f-2wbn9_bec308a1-7b44-4153-a863-7b9755407899/proxy-httpd/0.log"
Dec 04 16:16:14 crc kubenswrapper[4946]: I1204 16:16:14.678746 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-rnp6k_f2266dde-4870-46a5-9c4a-c348c6c4d4ed/swift-ring-rebalance/0.log"
Dec 04 16:16:14 crc kubenswrapper[4946]: I1204 16:16:14.824289 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/account-reaper/0.log"
Dec 04 16:16:14 crc kubenswrapper[4946]: I1204 16:16:14.874166 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/account-auditor/0.log"
Dec 04 16:16:14 crc kubenswrapper[4946]: I1204 16:16:14.991187 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/account-replicator/0.log"
Dec 04 16:16:15 crc kubenswrapper[4946]: I1204 16:16:15.115304 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/account-server/0.log"
Dec 04 16:16:15 crc kubenswrapper[4946]: I1204 16:16:15.132820 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/container-auditor/0.log"
Dec 04 16:16:15 crc kubenswrapper[4946]: I1204 16:16:15.201818 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/container-replicator/0.log"
Dec 04 16:16:15 crc kubenswrapper[4946]: I1204 16:16:15.528329 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/container-updater/0.log"
Dec 04 16:16:15 crc kubenswrapper[4946]: I1204 16:16:15.552481 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/container-server/0.log"
Dec 04 16:16:15 crc kubenswrapper[4946]: I1204 16:16:15.556911 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/object-auditor/0.log"
Dec 04 16:16:15 crc kubenswrapper[4946]: I1204 16:16:15.564217 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/object-expirer/0.log"
Dec 04 16:16:15 crc kubenswrapper[4946]: I1204 16:16:15.793250 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/object-replicator/0.log"
Dec 04 16:16:15 crc kubenswrapper[4946]: I1204 16:16:15.796914 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/object-server/0.log"
Dec 04 16:16:15 crc kubenswrapper[4946]: I1204 16:16:15.813650 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/object-updater/0.log"
Dec 04 16:16:15 crc kubenswrapper[4946]: I1204 16:16:15.859439 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/rsync/0.log"
Dec 04 16:16:16 crc kubenswrapper[4946]: I1204 16:16:16.030955 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b7676ab4-212c-4e17-a84a-0979a65936d1/swift-recon-cron/0.log"
Dec 04 16:16:16 crc kubenswrapper[4946]: I1204 16:16:16.126033 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-2wkhj_c3a03510-ccc5-4bce-9a72-0e943fd6423d/telemetry-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 04 16:16:16 crc kubenswrapper[4946]: I1204 16:16:16.538088 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_58d92c3f-b05e-47c1-89f7-55d7c3686966/test-operator-logs-container/0.log"
Dec 04 16:16:16 crc kubenswrapper[4946]: I1204 16:16:16.560976 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_3ca94ef1-1df3-4925-9335-b30db3fbffb9/tempest-tests-tempest-tests-runner/0.log"
Dec 04 16:16:16 crc kubenswrapper[4946]: I1204 16:16:16.703439 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-6l555_a3311d26-79ab-4472-944b-4d6ac8847a76/validate-network-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 04 16:16:19 crc kubenswrapper[4946]: I1204 16:16:19.492664 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3"
Dec 04 16:16:19 crc kubenswrapper[4946]: E1204 16:16:19.493801 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c"
Dec 04 16:16:20 crc kubenswrapper[4946]: I1204 16:16:20.414523 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_74261534-d493-4bb6-ac4f-e7196daaa71f/memcached/0.log"
Dec 04 16:16:31 crc kubenswrapper[4946]: I1204 16:16:31.453311 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3"
Dec 04 16:16:31 crc kubenswrapper[4946]: E1204 16:16:31.454803 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c"
Dec 04 16:16:44 crc kubenswrapper[4946]: I1204 16:16:44.453863 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3"
Dec 04 16:16:44 crc kubenswrapper[4946]: E1204 16:16:44.455222 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c"
Dec 04 16:16:56 crc kubenswrapper[4946]: I1204 16:16:56.455321 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3"
Dec 04 16:16:56 crc kubenswrapper[4946]: E1204 16:16:56.456519 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c"
Dec 04 16:16:57 crc kubenswrapper[4946]: I1204 16:16:57.325075 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-hdr95_6d911452-36e0-4227-9068-4ed0b86f025c/kube-rbac-proxy/0.log"
Dec 04 16:16:57 crc kubenswrapper[4946]: I1204 16:16:57.379798 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-hdr95_6d911452-36e0-4227-9068-4ed0b86f025c/manager/0.log"
Dec 04 16:16:57 crc kubenswrapper[4946]: I1204 16:16:57.554679 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-29dnk_76e27cbb-fdb9-447e-983f-48b7dbe8d46d/kube-rbac-proxy/0.log"
Dec 04 16:16:57 crc kubenswrapper[4946]: I1204 16:16:57.736574 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s_f8b2c834-77be-4c4c-90f9-ab83696108a8/util/0.log"
Dec 04 16:16:57 crc kubenswrapper[4946]: I1204 16:16:57.748605 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-29dnk_76e27cbb-fdb9-447e-983f-48b7dbe8d46d/manager/0.log"
Dec 04 16:16:58 crc kubenswrapper[4946]: I1204 16:16:58.043317 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s_f8b2c834-77be-4c4c-90f9-ab83696108a8/pull/0.log"
Dec 04 16:16:58 crc kubenswrapper[4946]: I1204 16:16:58.047787 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s_f8b2c834-77be-4c4c-90f9-ab83696108a8/util/0.log"
Dec 04 16:16:58 crc kubenswrapper[4946]: I1204 16:16:58.059206 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s_f8b2c834-77be-4c4c-90f9-ab83696108a8/pull/0.log"
Dec 04 16:16:58 crc kubenswrapper[4946]: I1204 16:16:58.267007 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s_f8b2c834-77be-4c4c-90f9-ab83696108a8/util/0.log"
Dec 04 16:16:58 crc kubenswrapper[4946]: I1204 16:16:58.291908 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s_f8b2c834-77be-4c4c-90f9-ab83696108a8/pull/0.log"
Dec 04 16:16:58 crc kubenswrapper[4946]: I1204 16:16:58.318877 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de42c82fef0755fb84da265cd3afc956cb4ae6584911cffa3bfa89aa52vvx7s_f8b2c834-77be-4c4c-90f9-ab83696108a8/extract/0.log"
Dec 04 16:16:58 crc kubenswrapper[4946]: I1204 16:16:58.492076 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-zdqwh_d4f2bb8c-1eac-4b12-bd9a-9c8ebad7d96f/manager/0.log"
Dec 04 16:16:58 crc kubenswrapper[4946]: I1204 16:16:58.511615 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-zdqwh_d4f2bb8c-1eac-4b12-bd9a-9c8ebad7d96f/kube-rbac-proxy/0.log"
Dec 04 16:16:58 crc kubenswrapper[4946]: I1204 16:16:58.542335 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-fsn5z_a69ef7eb-6ffc-47cb-b7ee-7c46734d0857/kube-rbac-proxy/0.log"
Dec 04 16:16:58 crc kubenswrapper[4946]: I1204 16:16:58.803905 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-fsn5z_a69ef7eb-6ffc-47cb-b7ee-7c46734d0857/manager/0.log"
Dec 04 16:16:58 crc kubenswrapper[4946]: I1204 16:16:58.813660 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-bd6fv_ae1dfef3-ccf2-4ac3-986e-77c23bddcdb5/manager/0.log"
Dec 04 16:16:58 crc kubenswrapper[4946]: I1204 16:16:58.834529 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-bd6fv_ae1dfef3-ccf2-4ac3-986e-77c23bddcdb5/kube-rbac-proxy/0.log"
Dec 04 16:16:59 crc kubenswrapper[4946]: I1204 16:16:59.106986 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-rrwq2_866cf896-d679-426b-80d9-de7a368958ed/kube-rbac-proxy/0.log"
Dec 04 16:16:59 crc kubenswrapper[4946]: I1204 16:16:59.184072 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-rrwq2_866cf896-d679-426b-80d9-de7a368958ed/manager/0.log"
Dec 04 16:16:59 crc kubenswrapper[4946]: I1204 16:16:59.389274 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-tpk4r_421ad636-5eeb-4596-84c0-a0ca3cfbdef2/kube-rbac-proxy/0.log"
Dec 04 16:16:59 crc kubenswrapper[4946]: I1204 16:16:59.576245 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-bnspk_9584ac77-41db-4621-a720-88b7c107ffa2/kube-rbac-proxy/0.log"
Dec 04 16:16:59 crc kubenswrapper[4946]: I1204 16:16:59.639634 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-bnspk_9584ac77-41db-4621-a720-88b7c107ffa2/manager/0.log"
Dec 04 16:16:59 crc kubenswrapper[4946]: I1204 16:16:59.667916 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-tpk4r_421ad636-5eeb-4596-84c0-a0ca3cfbdef2/manager/0.log"
Dec 04 16:16:59 crc kubenswrapper[4946]: I1204 16:16:59.806746 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-8nbch_965366ad-4bb5-424a-9cf0-d09c42dec244/kube-rbac-proxy/0.log"
Dec 04 16:16:59 crc kubenswrapper[4946]: I1204 16:16:59.951689 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-8nbch_965366ad-4bb5-424a-9cf0-d09c42dec244/manager/0.log"
Dec 04 16:17:00 crc kubenswrapper[4946]: I1204 16:17:00.040457 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-j8r75_52d7003e-8315-49b6-b086-f0655f555960/kube-rbac-proxy/0.log"
Dec 04 16:17:00 crc kubenswrapper[4946]: I1204 16:17:00.121036 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-j8r75_52d7003e-8315-49b6-b086-f0655f555960/manager/0.log"
Dec 04 16:17:00 crc kubenswrapper[4946]: I1204 16:17:00.362734 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-2wkbw_db2d87e7-4cf3-4d0d-b77e-2d02a073872c/manager/0.log"
Dec 04 16:17:00 crc kubenswrapper[4946]: I1204 16:17:00.388826 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-2wkbw_db2d87e7-4cf3-4d0d-b77e-2d02a073872c/kube-rbac-proxy/0.log"
Dec 04 16:17:00 crc kubenswrapper[4946]: I1204 16:17:00.580563 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-ptn6j_41b063f1-7646-49dc-85e4-9e7185220de1/kube-rbac-proxy/0.log"
Dec 04 16:17:00 crc kubenswrapper[4946]: I1204 16:17:00.587942 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-ptn6j_41b063f1-7646-49dc-85e4-9e7185220de1/manager/0.log"
Dec 04 16:17:00 crc kubenswrapper[4946]: I1204 16:17:00.669603 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-vzpjw_10b2d29b-4444-4dfe-ad8f-ad913798df88/kube-rbac-proxy/0.log"
Dec 04 16:17:00 crc kubenswrapper[4946]: I1204 16:17:00.850383 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-vzpjw_10b2d29b-4444-4dfe-ad8f-ad913798df88/manager/0.log"
Dec 04 16:17:00 crc kubenswrapper[4946]: I1204 16:17:00.862419 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-77ss8_262aaccf-cdc8-44b6-8fc6-8702491cfad8/kube-rbac-proxy/0.log"
Dec 04 16:17:00 crc kubenswrapper[4946]: I1204 16:17:00.879765 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-77ss8_262aaccf-cdc8-44b6-8fc6-8702491cfad8/manager/0.log"
Dec 04 16:17:01 crc kubenswrapper[4946]: I1204 16:17:01.154034 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4frldw_e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6/kube-rbac-proxy/0.log"
Dec 04 16:17:01 crc kubenswrapper[4946]: I1204 16:17:01.208856 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4frldw_e9b1e4f0-10d3-41fc-bc9b-235ef1ee5cc6/manager/0.log"
Dec 04 16:17:01 crc kubenswrapper[4946]: I1204 16:17:01.654772 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-ttbdt_06d05850-f87f-4944-be9f-c3f86f6bbc3e/registry-server/0.log"
Dec 04 16:17:01 crc kubenswrapper[4946]: I1204 16:17:01.750468 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-tp7zf_4439c79c-3951-4b61-98ad-86f417432fde/kube-rbac-proxy/0.log"
Dec 04 16:17:01 crc kubenswrapper[4946]: I1204 16:17:01.780534 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-769dd9b968-btnbt_aa1fefcd-b28b-4ecf-9b92-e1fabe27cd26/operator/0.log"
Dec 04 16:17:01 crc kubenswrapper[4946]: I1204 16:17:01.970709 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-tp7zf_4439c79c-3951-4b61-98ad-86f417432fde/manager/0.log"
Dec 04 16:17:02 crc kubenswrapper[4946]: I1204 16:17:02.072545 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-rdrpr_110a7ea7-4b02-4f5d-be16-87c4f0090eec/kube-rbac-proxy/0.log"
Dec 04 16:17:02 crc kubenswrapper[4946]: I1204 16:17:02.130343 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-rdrpr_110a7ea7-4b02-4f5d-be16-87c4f0090eec/manager/0.log"
Dec 04 16:17:02 crc kubenswrapper[4946]: I1204 16:17:02.405240 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-25vwl_b7a5eb4e-a8b8-43e5-95cf-51f40d454d79/kube-rbac-proxy/0.log"
Dec 04 16:17:02 crc kubenswrapper[4946]: I1204 16:17:02.471880 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7b58c9d549-7lmqq_6a246ded-a3c1-42c5-a6a7-648dec93f77f/manager/0.log"
Dec 04 16:17:02 crc kubenswrapper[4946]: I1204 16:17:02.485812 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-ffkxj_57d9b742-9429-43c6-8798-6813c321866f/operator/0.log"
Dec 04 16:17:02 crc kubenswrapper[4946]: I1204 16:17:02.642064 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-25vwl_b7a5eb4e-a8b8-43e5-95cf-51f40d454d79/manager/0.log"
Dec 04 16:17:02 crc kubenswrapper[4946]: I1204 16:17:02.897259 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5d9cf8555c-csjn7_c9933077-41f3-425f-b478-c53691b7d817/kube-rbac-proxy/0.log"
Dec 04 16:17:02 crc kubenswrapper[4946]: I1204 16:17:02.957956 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-bqtnh_c92477ee-92e6-4dca-af5d-9b0f44bcaf60/kube-rbac-proxy/0.log"
Dec 04 16:17:03 crc kubenswrapper[4946]: I1204 16:17:03.138676 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-bqtnh_c92477ee-92e6-4dca-af5d-9b0f44bcaf60/manager/0.log"
Dec 04 16:17:03 crc kubenswrapper[4946]: I1204 16:17:03.219577 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5d9cf8555c-csjn7_c9933077-41f3-425f-b478-c53691b7d817/manager/0.log"
Dec 04 16:17:03 crc kubenswrapper[4946]: I1204 16:17:03.278497 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-f6jlm_fa8a1267-46f8-4554-8a91-7389be265abd/kube-rbac-proxy/0.log"
Dec 04 16:17:03 crc kubenswrapper[4946]: I1204 16:17:03.287887 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-f6jlm_fa8a1267-46f8-4554-8a91-7389be265abd/manager/0.log"
Dec 04 16:17:08 crc kubenswrapper[4946]: I1204 16:17:08.453301 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3"
Dec 04 16:17:08 crc kubenswrapper[4946]: E1204 16:17:08.454276 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c"
Dec 04 16:17:21 crc kubenswrapper[4946]: I1204 16:17:21.454178 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3"
Dec 04 16:17:21 crc kubenswrapper[4946]: E1204 16:17:21.455888 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c"
Dec 04 16:17:31 crc kubenswrapper[4946]: I1204 16:17:31.392329 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-rvvcq_be182b12-eeb7-4695-b7e4-247044da76cf/control-plane-machine-set-operator/0.log"
Dec 04 16:17:31 crc kubenswrapper[4946]: I1204 16:17:31.566780 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-7bzmc_65dc1ade-ddd4-4a22-99bd-780112f318f9/kube-rbac-proxy/0.log"
Dec 04 16:17:31 crc kubenswrapper[4946]: I1204 16:17:31.649071 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-7bzmc_65dc1ade-ddd4-4a22-99bd-780112f318f9/machine-api-operator/0.log"
Dec 04 16:17:34 crc kubenswrapper[4946]: I1204 16:17:34.453291 4946 scope.go:117] "RemoveContainer"
containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" Dec 04 16:17:34 crc kubenswrapper[4946]: E1204 16:17:34.454303 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:17:45 crc kubenswrapper[4946]: I1204 16:17:45.452982 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" Dec 04 16:17:45 crc kubenswrapper[4946]: E1204 16:17:45.453919 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:17:50 crc kubenswrapper[4946]: I1204 16:17:50.125917 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-cr2wp_41b2bdd0-54fd-436e-a498-056e3fdd6934/cert-manager-controller/0.log" Dec 04 16:17:50 crc kubenswrapper[4946]: I1204 16:17:50.304381 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-kd8gk_bb8b188e-8662-4027-9493-886326967ed1/cert-manager-cainjector/0.log" Dec 04 16:17:50 crc kubenswrapper[4946]: I1204 16:17:50.353466 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-dd4pw_3611b347-1802-4635-8abd-47d9a6f4ad29/cert-manager-webhook/0.log" Dec 04 16:18:00 crc kubenswrapper[4946]: I1204 16:18:00.459081 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" Dec 04 16:18:00 crc kubenswrapper[4946]: E1204 16:18:00.462159 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:18:06 crc kubenswrapper[4946]: I1204 16:18:06.965025 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-ms9gw_cc926dea-6324-4350-bf4c-6f4142b2547b/nmstate-console-plugin/0.log" Dec 04 16:18:07 crc kubenswrapper[4946]: I1204 16:18:07.214445 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-b6z2h_5ae26fa8-9751-40d0-b327-45011a9ec579/nmstate-handler/0.log" Dec 04 16:18:07 crc kubenswrapper[4946]: I1204 16:18:07.219542 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-bxmt7_70f33645-a744-4196-a5d0-e577c90023d5/kube-rbac-proxy/0.log" Dec 04 16:18:07 crc kubenswrapper[4946]: I1204 16:18:07.291933 4946 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-bxmt7_70f33645-a744-4196-a5d0-e577c90023d5/nmstate-metrics/0.log" Dec 04 16:18:07 crc kubenswrapper[4946]: I1204 16:18:07.438986 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-x8ccj_05dcb49b-4fdf-4fdb-b619-fc7649bb203d/nmstate-operator/0.log" Dec 04 16:18:07 crc kubenswrapper[4946]: I1204 16:18:07.530764 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-cb89w_4f22e89a-c84f-4f88-8718-2d3c7238324a/nmstate-webhook/0.log" Dec 04 16:18:14 crc kubenswrapper[4946]: I1204 16:18:14.454223 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" Dec 04 16:18:14 crc kubenswrapper[4946]: E1204 16:18:14.455842 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:18:25 crc kubenswrapper[4946]: I1204 16:18:25.453581 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" Dec 04 16:18:25 crc kubenswrapper[4946]: E1204 16:18:25.454681 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:18:25 crc kubenswrapper[4946]: I1204 16:18:25.523718 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-5f5b48f4dc-5fjct_14739c62-fc32-41a5-be6d-3f6673c6a231/kube-rbac-proxy/0.log" Dec 04 16:18:25 crc kubenswrapper[4946]: I1204 16:18:25.585855 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-5f5b48f4dc-5fjct_14739c62-fc32-41a5-be6d-3f6673c6a231/manager/0.log" Dec 04 16:18:38 crc kubenswrapper[4946]: I1204 16:18:38.453612 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" Dec 04 16:18:38 crc kubenswrapper[4946]: E1204 16:18:38.454659 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:18:43 crc kubenswrapper[4946]: I1204 16:18:43.960782 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ghxs7"] Dec 04 16:18:43 crc kubenswrapper[4946]: E1204 16:18:43.961922 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f" containerName="container-00" Dec 04 16:18:43 crc kubenswrapper[4946]: 
I1204 16:18:43.961937 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f" containerName="container-00" Dec 04 16:18:43 crc kubenswrapper[4946]: I1204 16:18:43.962167 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ea76d27-ef86-4c9d-8dfc-edf9d5461f9f" containerName="container-00" Dec 04 16:18:43 crc kubenswrapper[4946]: I1204 16:18:43.963826 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ghxs7" Dec 04 16:18:43 crc kubenswrapper[4946]: I1204 16:18:43.981337 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ghxs7"] Dec 04 16:18:43 crc kubenswrapper[4946]: I1204 16:18:43.990395 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60-utilities\") pod \"redhat-operators-ghxs7\" (UID: \"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60\") " pod="openshift-marketplace/redhat-operators-ghxs7" Dec 04 16:18:43 crc kubenswrapper[4946]: I1204 16:18:43.990544 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8q47r\" (UniqueName: \"kubernetes.io/projected/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60-kube-api-access-8q47r\") pod \"redhat-operators-ghxs7\" (UID: \"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60\") " pod="openshift-marketplace/redhat-operators-ghxs7" Dec 04 16:18:43 crc kubenswrapper[4946]: I1204 16:18:43.990602 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60-catalog-content\") pod \"redhat-operators-ghxs7\" (UID: \"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60\") " pod="openshift-marketplace/redhat-operators-ghxs7" Dec 04 16:18:44 crc kubenswrapper[4946]: I1204 16:18:44.093133 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60-utilities\") pod \"redhat-operators-ghxs7\" (UID: \"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60\") " pod="openshift-marketplace/redhat-operators-ghxs7" Dec 04 16:18:44 crc kubenswrapper[4946]: I1204 16:18:44.093299 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8q47r\" (UniqueName: \"kubernetes.io/projected/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60-kube-api-access-8q47r\") pod \"redhat-operators-ghxs7\" (UID: \"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60\") " pod="openshift-marketplace/redhat-operators-ghxs7" Dec 04 16:18:44 crc kubenswrapper[4946]: I1204 16:18:44.093350 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60-catalog-content\") pod \"redhat-operators-ghxs7\" (UID: \"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60\") " pod="openshift-marketplace/redhat-operators-ghxs7" Dec 04 16:18:44 crc kubenswrapper[4946]: I1204 16:18:44.093627 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60-utilities\") pod \"redhat-operators-ghxs7\" (UID: \"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60\") " pod="openshift-marketplace/redhat-operators-ghxs7" Dec 04 16:18:44 crc kubenswrapper[4946]: I1204 16:18:44.094973 4946 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60-catalog-content\") pod \"redhat-operators-ghxs7\" (UID: \"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60\") " pod="openshift-marketplace/redhat-operators-ghxs7" Dec 04 16:18:44 crc kubenswrapper[4946]: I1204 16:18:44.760002 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8q47r\" (UniqueName: \"kubernetes.io/projected/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60-kube-api-access-8q47r\") pod \"redhat-operators-ghxs7\" (UID: \"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60\") " pod="openshift-marketplace/redhat-operators-ghxs7" Dec 04 16:18:44 crc kubenswrapper[4946]: I1204 16:18:44.884193 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ghxs7" Dec 04 16:18:45 crc kubenswrapper[4946]: I1204 16:18:45.481334 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ghxs7"] Dec 04 16:18:46 crc kubenswrapper[4946]: I1204 16:18:46.042087 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-2h9x2_12008ba7-79ab-4c29-beb5-c3d5bffa7bd2/kube-rbac-proxy/0.log" Dec 04 16:18:46 crc kubenswrapper[4946]: I1204 16:18:46.252060 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-2h9x2_12008ba7-79ab-4c29-beb5-c3d5bffa7bd2/controller/0.log" Dec 04 16:18:46 crc kubenswrapper[4946]: I1204 16:18:46.303614 4946 generic.go:334] "Generic (PLEG): container finished" podID="ce7a2f2c-e165-4bd1-b19f-21a4c7809b60" containerID="ddacd54a72edb854f99acf47672ade319ab59517e0cfadee4555baccb2bbd62f" exitCode=0 Dec 04 16:18:46 crc kubenswrapper[4946]: I1204 16:18:46.303677 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ghxs7" event={"ID":"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60","Type":"ContainerDied","Data":"ddacd54a72edb854f99acf47672ade319ab59517e0cfadee4555baccb2bbd62f"} Dec 04 16:18:46 crc kubenswrapper[4946]: I1204 16:18:46.303715 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ghxs7" event={"ID":"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60","Type":"ContainerStarted","Data":"5cb8ada54c7e4540d44ae99972a4e3dec2fedfc18a2cc7b1a7b570662b0081ba"} Dec 04 16:18:46 crc kubenswrapper[4946]: I1204 16:18:46.309931 4946 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 16:18:46 crc kubenswrapper[4946]: I1204 16:18:46.626566 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-frr-files/0.log" Dec 04 16:18:46 crc kubenswrapper[4946]: I1204 16:18:46.938357 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-frr-files/0.log" Dec 04 16:18:46 crc kubenswrapper[4946]: I1204 16:18:46.946108 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-reloader/0.log" Dec 04 16:18:47 crc kubenswrapper[4946]: I1204 16:18:47.059862 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-reloader/0.log" Dec 04 16:18:47 crc kubenswrapper[4946]: I1204 16:18:47.074401 4946 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-metrics/0.log" Dec 04 16:18:47 crc kubenswrapper[4946]: I1204 16:18:47.303290 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-frr-files/0.log" Dec 04 16:18:47 crc kubenswrapper[4946]: I1204 16:18:47.317266 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ghxs7" event={"ID":"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60","Type":"ContainerStarted","Data":"aa20f614a3c82eff7a508387775bdbfa2f178b1077aa9f870106b6f8467ffa6a"} Dec 04 16:18:47 crc kubenswrapper[4946]: I1204 16:18:47.339576 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-reloader/0.log" Dec 04 16:18:47 crc kubenswrapper[4946]: I1204 16:18:47.354539 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-metrics/0.log" Dec 04 16:18:47 crc kubenswrapper[4946]: I1204 16:18:47.458872 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-metrics/0.log" Dec 04 16:18:47 crc kubenswrapper[4946]: I1204 16:18:47.604154 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-reloader/0.log" Dec 04 16:18:47 crc kubenswrapper[4946]: I1204 16:18:47.648841 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-metrics/0.log" Dec 04 16:18:47 crc kubenswrapper[4946]: I1204 16:18:47.682735 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/cp-frr-files/0.log" Dec 04 16:18:47 crc kubenswrapper[4946]: I1204 16:18:47.792707 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/controller/0.log" Dec 04 16:18:47 crc kubenswrapper[4946]: I1204 16:18:47.958444 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/frr-metrics/0.log" Dec 04 16:18:48 crc kubenswrapper[4946]: I1204 16:18:48.089825 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/kube-rbac-proxy/0.log" Dec 04 16:18:48 crc kubenswrapper[4946]: I1204 16:18:48.131319 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/kube-rbac-proxy-frr/0.log" Dec 04 16:18:48 crc kubenswrapper[4946]: I1204 16:18:48.288321 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/reloader/0.log" Dec 04 16:18:48 crc kubenswrapper[4946]: I1204 16:18:48.400483 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-665hd_3fb0ad82-3e42-4980-ac9c-3fba3fac16fa/frr-k8s-webhook-server/0.log" Dec 04 16:18:48 crc kubenswrapper[4946]: I1204 16:18:48.715790 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-9c8665f76-qlhr2_14e7c7f4-2a8f-42fc-85aa-11f9a57be226/manager/0.log" Dec 04 16:18:48 crc kubenswrapper[4946]: I1204 16:18:48.927521 4946 log.go:25] 
"Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-76986644d9-lbmnp_c07a0c11-227a-4c24-8daa-695fa165bb03/webhook-server/0.log" Dec 04 16:18:49 crc kubenswrapper[4946]: I1204 16:18:49.095446 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-nsx9h_2c933247-f732-4808-a196-15d9ad5f03e7/kube-rbac-proxy/0.log" Dec 04 16:18:49 crc kubenswrapper[4946]: I1204 16:18:49.461837 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9r7z7_2232132f-0377-4daf-98dc-9a2ea013a794/frr/0.log" Dec 04 16:18:49 crc kubenswrapper[4946]: I1204 16:18:49.809290 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-nsx9h_2c933247-f732-4808-a196-15d9ad5f03e7/speaker/0.log" Dec 04 16:18:50 crc kubenswrapper[4946]: I1204 16:18:50.347046 4946 generic.go:334] "Generic (PLEG): container finished" podID="ce7a2f2c-e165-4bd1-b19f-21a4c7809b60" containerID="aa20f614a3c82eff7a508387775bdbfa2f178b1077aa9f870106b6f8467ffa6a" exitCode=0 Dec 04 16:18:50 crc kubenswrapper[4946]: I1204 16:18:50.347090 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ghxs7" event={"ID":"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60","Type":"ContainerDied","Data":"aa20f614a3c82eff7a508387775bdbfa2f178b1077aa9f870106b6f8467ffa6a"} Dec 04 16:18:52 crc kubenswrapper[4946]: I1204 16:18:52.376538 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ghxs7" event={"ID":"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60","Type":"ContainerStarted","Data":"143e82b1542c8cd9e1355c6248cf7dd4be7468e6d1e8a20ff20eebe1353ee55e"} Dec 04 16:18:52 crc kubenswrapper[4946]: I1204 16:18:52.404539 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ghxs7" podStartSLOduration=4.357868345 podStartE2EDuration="9.404515968s" podCreationTimestamp="2025-12-04 16:18:43 +0000 UTC" firstStartedPulling="2025-12-04 16:18:46.309595449 +0000 UTC m=+4577.195639090" lastFinishedPulling="2025-12-04 16:18:51.356243072 +0000 UTC m=+4582.242286713" observedRunningTime="2025-12-04 16:18:52.399389542 +0000 UTC m=+4583.285433193" watchObservedRunningTime="2025-12-04 16:18:52.404515968 +0000 UTC m=+4583.290559609" Dec 04 16:18:52 crc kubenswrapper[4946]: I1204 16:18:52.453351 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" Dec 04 16:18:52 crc kubenswrapper[4946]: E1204 16:18:52.453653 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:18:54 crc kubenswrapper[4946]: I1204 16:18:54.884637 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ghxs7" Dec 04 16:18:54 crc kubenswrapper[4946]: I1204 16:18:54.885332 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ghxs7" Dec 04 16:18:55 crc kubenswrapper[4946]: I1204 16:18:55.965377 4946 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-ghxs7" 
podUID="ce7a2f2c-e165-4bd1-b19f-21a4c7809b60" containerName="registry-server" probeResult="failure" output=< Dec 04 16:18:55 crc kubenswrapper[4946]: timeout: failed to connect service ":50051" within 1s Dec 04 16:18:55 crc kubenswrapper[4946]: > Dec 04 16:19:03 crc kubenswrapper[4946]: I1204 16:19:03.454076 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" Dec 04 16:19:03 crc kubenswrapper[4946]: E1204 16:19:03.455709 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:19:04 crc kubenswrapper[4946]: I1204 16:19:04.942403 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ghxs7" Dec 04 16:19:05 crc kubenswrapper[4946]: I1204 16:19:05.020098 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ghxs7" Dec 04 16:19:05 crc kubenswrapper[4946]: I1204 16:19:05.179874 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ghxs7"] Dec 04 16:19:06 crc kubenswrapper[4946]: I1204 16:19:06.575806 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ghxs7" podUID="ce7a2f2c-e165-4bd1-b19f-21a4c7809b60" containerName="registry-server" containerID="cri-o://143e82b1542c8cd9e1355c6248cf7dd4be7468e6d1e8a20ff20eebe1353ee55e" gracePeriod=2 Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.377716 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ghxs7" Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.491420 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8q47r\" (UniqueName: \"kubernetes.io/projected/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60-kube-api-access-8q47r\") pod \"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60\" (UID: \"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60\") " Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.494054 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60-catalog-content\") pod \"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60\" (UID: \"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60\") " Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.494139 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60-utilities\") pod \"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60\" (UID: \"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60\") " Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.495420 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60-utilities" (OuterVolumeSpecName: "utilities") pod "ce7a2f2c-e165-4bd1-b19f-21a4c7809b60" (UID: "ce7a2f2c-e165-4bd1-b19f-21a4c7809b60"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.496463 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.506542 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60-kube-api-access-8q47r" (OuterVolumeSpecName: "kube-api-access-8q47r") pod "ce7a2f2c-e165-4bd1-b19f-21a4c7809b60" (UID: "ce7a2f2c-e165-4bd1-b19f-21a4c7809b60"). InnerVolumeSpecName "kube-api-access-8q47r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.601814 4946 generic.go:334] "Generic (PLEG): container finished" podID="ce7a2f2c-e165-4bd1-b19f-21a4c7809b60" containerID="143e82b1542c8cd9e1355c6248cf7dd4be7468e6d1e8a20ff20eebe1353ee55e" exitCode=0 Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.602088 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ghxs7" event={"ID":"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60","Type":"ContainerDied","Data":"143e82b1542c8cd9e1355c6248cf7dd4be7468e6d1e8a20ff20eebe1353ee55e"} Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.602174 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ghxs7" event={"ID":"ce7a2f2c-e165-4bd1-b19f-21a4c7809b60","Type":"ContainerDied","Data":"5cb8ada54c7e4540d44ae99972a4e3dec2fedfc18a2cc7b1a7b570662b0081ba"} Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.602328 4946 scope.go:117] "RemoveContainer" containerID="143e82b1542c8cd9e1355c6248cf7dd4be7468e6d1e8a20ff20eebe1353ee55e" Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.602970 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ghxs7" Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.638854 4946 scope.go:117] "RemoveContainer" containerID="aa20f614a3c82eff7a508387775bdbfa2f178b1077aa9f870106b6f8467ffa6a" Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.644738 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8q47r\" (UniqueName: \"kubernetes.io/projected/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60-kube-api-access-8q47r\") on node \"crc\" DevicePath \"\"" Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.677633 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ce7a2f2c-e165-4bd1-b19f-21a4c7809b60" (UID: "ce7a2f2c-e165-4bd1-b19f-21a4c7809b60"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.716864 4946 scope.go:117] "RemoveContainer" containerID="ddacd54a72edb854f99acf47672ade319ab59517e0cfadee4555baccb2bbd62f" Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.749442 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.768380 4946 scope.go:117] "RemoveContainer" containerID="143e82b1542c8cd9e1355c6248cf7dd4be7468e6d1e8a20ff20eebe1353ee55e" Dec 04 16:19:07 crc kubenswrapper[4946]: E1204 16:19:07.772302 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"143e82b1542c8cd9e1355c6248cf7dd4be7468e6d1e8a20ff20eebe1353ee55e\": container with ID starting with 143e82b1542c8cd9e1355c6248cf7dd4be7468e6d1e8a20ff20eebe1353ee55e not found: ID does not exist" containerID="143e82b1542c8cd9e1355c6248cf7dd4be7468e6d1e8a20ff20eebe1353ee55e" Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.772351 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"143e82b1542c8cd9e1355c6248cf7dd4be7468e6d1e8a20ff20eebe1353ee55e"} err="failed to get container status \"143e82b1542c8cd9e1355c6248cf7dd4be7468e6d1e8a20ff20eebe1353ee55e\": rpc error: code = NotFound desc = could not find container \"143e82b1542c8cd9e1355c6248cf7dd4be7468e6d1e8a20ff20eebe1353ee55e\": container with ID starting with 143e82b1542c8cd9e1355c6248cf7dd4be7468e6d1e8a20ff20eebe1353ee55e not found: ID does not exist" Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.772381 4946 scope.go:117] "RemoveContainer" containerID="aa20f614a3c82eff7a508387775bdbfa2f178b1077aa9f870106b6f8467ffa6a" Dec 04 16:19:07 crc kubenswrapper[4946]: E1204 16:19:07.772813 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa20f614a3c82eff7a508387775bdbfa2f178b1077aa9f870106b6f8467ffa6a\": container with ID starting with aa20f614a3c82eff7a508387775bdbfa2f178b1077aa9f870106b6f8467ffa6a not found: ID does not exist" containerID="aa20f614a3c82eff7a508387775bdbfa2f178b1077aa9f870106b6f8467ffa6a" Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.772878 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa20f614a3c82eff7a508387775bdbfa2f178b1077aa9f870106b6f8467ffa6a"} err="failed to get container status \"aa20f614a3c82eff7a508387775bdbfa2f178b1077aa9f870106b6f8467ffa6a\": rpc error: code = NotFound desc = could not find container \"aa20f614a3c82eff7a508387775bdbfa2f178b1077aa9f870106b6f8467ffa6a\": container with ID starting with aa20f614a3c82eff7a508387775bdbfa2f178b1077aa9f870106b6f8467ffa6a not found: ID does not exist" Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.772922 4946 scope.go:117] "RemoveContainer" containerID="ddacd54a72edb854f99acf47672ade319ab59517e0cfadee4555baccb2bbd62f" Dec 04 16:19:07 crc kubenswrapper[4946]: E1204 16:19:07.773343 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ddacd54a72edb854f99acf47672ade319ab59517e0cfadee4555baccb2bbd62f\": container with ID starting with ddacd54a72edb854f99acf47672ade319ab59517e0cfadee4555baccb2bbd62f not found: ID does not exist" 
containerID="ddacd54a72edb854f99acf47672ade319ab59517e0cfadee4555baccb2bbd62f" Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.773386 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddacd54a72edb854f99acf47672ade319ab59517e0cfadee4555baccb2bbd62f"} err="failed to get container status \"ddacd54a72edb854f99acf47672ade319ab59517e0cfadee4555baccb2bbd62f\": rpc error: code = NotFound desc = could not find container \"ddacd54a72edb854f99acf47672ade319ab59517e0cfadee4555baccb2bbd62f\": container with ID starting with ddacd54a72edb854f99acf47672ade319ab59517e0cfadee4555baccb2bbd62f not found: ID does not exist" Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.946324 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ghxs7"] Dec 04 16:19:07 crc kubenswrapper[4946]: I1204 16:19:07.955295 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ghxs7"] Dec 04 16:19:09 crc kubenswrapper[4946]: I1204 16:19:09.466428 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce7a2f2c-e165-4bd1-b19f-21a4c7809b60" path="/var/lib/kubelet/pods/ce7a2f2c-e165-4bd1-b19f-21a4c7809b60/volumes" Dec 04 16:19:10 crc kubenswrapper[4946]: I1204 16:19:10.088350 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh_68203e19-3c15-4d99-a709-a7338b2f0dbc/util/0.log" Dec 04 16:19:10 crc kubenswrapper[4946]: I1204 16:19:10.354415 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh_68203e19-3c15-4d99-a709-a7338b2f0dbc/util/0.log" Dec 04 16:19:10 crc kubenswrapper[4946]: I1204 16:19:10.392750 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh_68203e19-3c15-4d99-a709-a7338b2f0dbc/pull/0.log" Dec 04 16:19:10 crc kubenswrapper[4946]: I1204 16:19:10.402263 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh_68203e19-3c15-4d99-a709-a7338b2f0dbc/pull/0.log" Dec 04 16:19:10 crc kubenswrapper[4946]: I1204 16:19:10.622748 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh_68203e19-3c15-4d99-a709-a7338b2f0dbc/pull/0.log" Dec 04 16:19:10 crc kubenswrapper[4946]: I1204 16:19:10.631463 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh_68203e19-3c15-4d99-a709-a7338b2f0dbc/extract/0.log" Dec 04 16:19:10 crc kubenswrapper[4946]: I1204 16:19:10.680870 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f7thzh_68203e19-3c15-4d99-a709-a7338b2f0dbc/util/0.log" Dec 04 16:19:10 crc kubenswrapper[4946]: I1204 16:19:10.799956 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg_e4caec8c-f2e0-48dd-8138-6bccb6fafb86/util/0.log" Dec 04 16:19:11 crc kubenswrapper[4946]: I1204 16:19:11.099898 4946 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg_e4caec8c-f2e0-48dd-8138-6bccb6fafb86/pull/0.log" Dec 04 16:19:11 crc kubenswrapper[4946]: I1204 16:19:11.160877 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg_e4caec8c-f2e0-48dd-8138-6bccb6fafb86/util/0.log" Dec 04 16:19:11 crc kubenswrapper[4946]: I1204 16:19:11.210428 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg_e4caec8c-f2e0-48dd-8138-6bccb6fafb86/pull/0.log" Dec 04 16:19:11 crc kubenswrapper[4946]: I1204 16:19:11.400750 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg_e4caec8c-f2e0-48dd-8138-6bccb6fafb86/extract/0.log" Dec 04 16:19:11 crc kubenswrapper[4946]: I1204 16:19:11.431848 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg_e4caec8c-f2e0-48dd-8138-6bccb6fafb86/pull/0.log" Dec 04 16:19:11 crc kubenswrapper[4946]: I1204 16:19:11.469305 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92109gxlg_e4caec8c-f2e0-48dd-8138-6bccb6fafb86/util/0.log" Dec 04 16:19:12 crc kubenswrapper[4946]: I1204 16:19:12.332317 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk_768219c3-3efe-4a2f-9ac3-55cd3247166e/util/0.log" Dec 04 16:19:12 crc kubenswrapper[4946]: I1204 16:19:12.563886 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk_768219c3-3efe-4a2f-9ac3-55cd3247166e/pull/0.log" Dec 04 16:19:12 crc kubenswrapper[4946]: I1204 16:19:12.583158 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk_768219c3-3efe-4a2f-9ac3-55cd3247166e/util/0.log" Dec 04 16:19:12 crc kubenswrapper[4946]: I1204 16:19:12.599152 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk_768219c3-3efe-4a2f-9ac3-55cd3247166e/pull/0.log" Dec 04 16:19:12 crc kubenswrapper[4946]: I1204 16:19:12.827260 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk_768219c3-3efe-4a2f-9ac3-55cd3247166e/pull/0.log" Dec 04 16:19:12 crc kubenswrapper[4946]: I1204 16:19:12.896269 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk_768219c3-3efe-4a2f-9ac3-55cd3247166e/extract/0.log" Dec 04 16:19:12 crc kubenswrapper[4946]: I1204 16:19:12.898260 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1r4qtk_768219c3-3efe-4a2f-9ac3-55cd3247166e/util/0.log" Dec 04 16:19:13 crc kubenswrapper[4946]: I1204 16:19:13.042786 4946 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv_7349bd68-959a-4268-a194-f55f10061076/util/0.log" Dec 04 16:19:13 crc kubenswrapper[4946]: I1204 16:19:13.334220 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv_7349bd68-959a-4268-a194-f55f10061076/pull/0.log" Dec 04 16:19:13 crc kubenswrapper[4946]: I1204 16:19:13.455923 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv_7349bd68-959a-4268-a194-f55f10061076/util/0.log" Dec 04 16:19:13 crc kubenswrapper[4946]: I1204 16:19:13.493049 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv_7349bd68-959a-4268-a194-f55f10061076/pull/0.log" Dec 04 16:19:13 crc kubenswrapper[4946]: I1204 16:19:13.633794 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv_7349bd68-959a-4268-a194-f55f10061076/pull/0.log" Dec 04 16:19:13 crc kubenswrapper[4946]: I1204 16:19:13.653703 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv_7349bd68-959a-4268-a194-f55f10061076/extract/0.log" Dec 04 16:19:13 crc kubenswrapper[4946]: I1204 16:19:13.708798 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l6sf5_3fcf10a5-8a06-4542-9839-91e2881b5a5e/extract-utilities/0.log" Dec 04 16:19:13 crc kubenswrapper[4946]: I1204 16:19:13.717756 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83x8mgv_7349bd68-959a-4268-a194-f55f10061076/util/0.log" Dec 04 16:19:14 crc kubenswrapper[4946]: I1204 16:19:14.012588 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l6sf5_3fcf10a5-8a06-4542-9839-91e2881b5a5e/extract-utilities/0.log" Dec 04 16:19:14 crc kubenswrapper[4946]: I1204 16:19:14.012655 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l6sf5_3fcf10a5-8a06-4542-9839-91e2881b5a5e/extract-content/0.log" Dec 04 16:19:14 crc kubenswrapper[4946]: I1204 16:19:14.145677 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l6sf5_3fcf10a5-8a06-4542-9839-91e2881b5a5e/extract-content/0.log" Dec 04 16:19:14 crc kubenswrapper[4946]: I1204 16:19:14.301562 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l6sf5_3fcf10a5-8a06-4542-9839-91e2881b5a5e/extract-utilities/0.log" Dec 04 16:19:14 crc kubenswrapper[4946]: I1204 16:19:14.324312 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l6sf5_3fcf10a5-8a06-4542-9839-91e2881b5a5e/extract-content/0.log" Dec 04 16:19:14 crc kubenswrapper[4946]: I1204 16:19:14.555708 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8b6qp_b9e9c2ed-d146-49d0-94b7-e244eff03321/extract-utilities/0.log" Dec 04 16:19:14 crc kubenswrapper[4946]: I1204 16:19:14.822881 4946 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-8b6qp_b9e9c2ed-d146-49d0-94b7-e244eff03321/extract-utilities/0.log" Dec 04 16:19:14 crc kubenswrapper[4946]: I1204 16:19:14.861715 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8b6qp_b9e9c2ed-d146-49d0-94b7-e244eff03321/extract-content/0.log" Dec 04 16:19:14 crc kubenswrapper[4946]: I1204 16:19:14.924687 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l6sf5_3fcf10a5-8a06-4542-9839-91e2881b5a5e/registry-server/0.log" Dec 04 16:19:14 crc kubenswrapper[4946]: I1204 16:19:14.941009 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8b6qp_b9e9c2ed-d146-49d0-94b7-e244eff03321/extract-content/0.log" Dec 04 16:19:15 crc kubenswrapper[4946]: I1204 16:19:15.148311 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-2gvfq_0faffdcb-a8a7-4a01-b7fb-8f1a3ff869e8/marketplace-operator/0.log" Dec 04 16:19:15 crc kubenswrapper[4946]: I1204 16:19:15.197587 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8b6qp_b9e9c2ed-d146-49d0-94b7-e244eff03321/extract-utilities/0.log" Dec 04 16:19:15 crc kubenswrapper[4946]: I1204 16:19:15.197800 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8b6qp_b9e9c2ed-d146-49d0-94b7-e244eff03321/extract-content/0.log" Dec 04 16:19:15 crc kubenswrapper[4946]: I1204 16:19:15.547374 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mgklh_64dc1b8a-cfaa-435d-a093-fff34239250b/extract-utilities/0.log" Dec 04 16:19:15 crc kubenswrapper[4946]: I1204 16:19:15.828727 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8b6qp_b9e9c2ed-d146-49d0-94b7-e244eff03321/registry-server/0.log" Dec 04 16:19:15 crc kubenswrapper[4946]: I1204 16:19:15.868795 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mgklh_64dc1b8a-cfaa-435d-a093-fff34239250b/extract-content/0.log" Dec 04 16:19:15 crc kubenswrapper[4946]: I1204 16:19:15.881472 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mgklh_64dc1b8a-cfaa-435d-a093-fff34239250b/extract-content/0.log" Dec 04 16:19:15 crc kubenswrapper[4946]: I1204 16:19:15.886818 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mgklh_64dc1b8a-cfaa-435d-a093-fff34239250b/extract-utilities/0.log" Dec 04 16:19:16 crc kubenswrapper[4946]: I1204 16:19:16.091085 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mgklh_64dc1b8a-cfaa-435d-a093-fff34239250b/extract-utilities/0.log" Dec 04 16:19:16 crc kubenswrapper[4946]: I1204 16:19:16.109029 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bfp64_4e2bfdb6-af28-4c0d-8f0d-a99539c5b225/extract-utilities/0.log" Dec 04 16:19:16 crc kubenswrapper[4946]: I1204 16:19:16.168089 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mgklh_64dc1b8a-cfaa-435d-a093-fff34239250b/extract-content/0.log" Dec 04 16:19:16 crc kubenswrapper[4946]: I1204 16:19:16.218206 4946 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-mgklh_64dc1b8a-cfaa-435d-a093-fff34239250b/registry-server/0.log" Dec 04 16:19:16 crc kubenswrapper[4946]: I1204 16:19:16.476069 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bfp64_4e2bfdb6-af28-4c0d-8f0d-a99539c5b225/extract-utilities/0.log" Dec 04 16:19:16 crc kubenswrapper[4946]: I1204 16:19:16.503292 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bfp64_4e2bfdb6-af28-4c0d-8f0d-a99539c5b225/extract-content/0.log" Dec 04 16:19:16 crc kubenswrapper[4946]: I1204 16:19:16.528532 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bfp64_4e2bfdb6-af28-4c0d-8f0d-a99539c5b225/extract-content/0.log" Dec 04 16:19:16 crc kubenswrapper[4946]: I1204 16:19:16.801371 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bfp64_4e2bfdb6-af28-4c0d-8f0d-a99539c5b225/extract-utilities/0.log" Dec 04 16:19:16 crc kubenswrapper[4946]: I1204 16:19:16.846858 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bfp64_4e2bfdb6-af28-4c0d-8f0d-a99539c5b225/extract-content/0.log" Dec 04 16:19:17 crc kubenswrapper[4946]: I1204 16:19:17.045365 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bfp64_4e2bfdb6-af28-4c0d-8f0d-a99539c5b225/registry-server/0.log" Dec 04 16:19:17 crc kubenswrapper[4946]: I1204 16:19:17.457101 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" Dec 04 16:19:17 crc kubenswrapper[4946]: E1204 16:19:17.458456 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:19:29 crc kubenswrapper[4946]: I1204 16:19:29.461032 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" Dec 04 16:19:29 crc kubenswrapper[4946]: E1204 16:19:29.462350 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:19:32 crc kubenswrapper[4946]: I1204 16:19:32.105884 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fzc98"] Dec 04 16:19:32 crc kubenswrapper[4946]: E1204 16:19:32.107181 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce7a2f2c-e165-4bd1-b19f-21a4c7809b60" containerName="registry-server" Dec 04 16:19:32 crc kubenswrapper[4946]: I1204 16:19:32.107197 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce7a2f2c-e165-4bd1-b19f-21a4c7809b60" containerName="registry-server" Dec 04 16:19:32 crc kubenswrapper[4946]: E1204 16:19:32.107237 4946 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="ce7a2f2c-e165-4bd1-b19f-21a4c7809b60" containerName="extract-utilities" Dec 04 16:19:32 crc kubenswrapper[4946]: I1204 16:19:32.107244 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce7a2f2c-e165-4bd1-b19f-21a4c7809b60" containerName="extract-utilities" Dec 04 16:19:32 crc kubenswrapper[4946]: E1204 16:19:32.107268 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce7a2f2c-e165-4bd1-b19f-21a4c7809b60" containerName="extract-content" Dec 04 16:19:32 crc kubenswrapper[4946]: I1204 16:19:32.107276 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce7a2f2c-e165-4bd1-b19f-21a4c7809b60" containerName="extract-content" Dec 04 16:19:32 crc kubenswrapper[4946]: I1204 16:19:32.107529 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce7a2f2c-e165-4bd1-b19f-21a4c7809b60" containerName="registry-server" Dec 04 16:19:32 crc kubenswrapper[4946]: I1204 16:19:32.109717 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fzc98" Dec 04 16:19:32 crc kubenswrapper[4946]: I1204 16:19:32.119831 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fzc98"] Dec 04 16:19:32 crc kubenswrapper[4946]: I1204 16:19:32.181926 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc922a9e-d2de-4c7b-9389-290c3cfdb297-utilities\") pod \"redhat-marketplace-fzc98\" (UID: \"cc922a9e-d2de-4c7b-9389-290c3cfdb297\") " pod="openshift-marketplace/redhat-marketplace-fzc98" Dec 04 16:19:32 crc kubenswrapper[4946]: I1204 16:19:32.182091 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc922a9e-d2de-4c7b-9389-290c3cfdb297-catalog-content\") pod \"redhat-marketplace-fzc98\" (UID: \"cc922a9e-d2de-4c7b-9389-290c3cfdb297\") " pod="openshift-marketplace/redhat-marketplace-fzc98" Dec 04 16:19:32 crc kubenswrapper[4946]: I1204 16:19:32.182198 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6n79\" (UniqueName: \"kubernetes.io/projected/cc922a9e-d2de-4c7b-9389-290c3cfdb297-kube-api-access-n6n79\") pod \"redhat-marketplace-fzc98\" (UID: \"cc922a9e-d2de-4c7b-9389-290c3cfdb297\") " pod="openshift-marketplace/redhat-marketplace-fzc98" Dec 04 16:19:32 crc kubenswrapper[4946]: I1204 16:19:32.284342 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6n79\" (UniqueName: \"kubernetes.io/projected/cc922a9e-d2de-4c7b-9389-290c3cfdb297-kube-api-access-n6n79\") pod \"redhat-marketplace-fzc98\" (UID: \"cc922a9e-d2de-4c7b-9389-290c3cfdb297\") " pod="openshift-marketplace/redhat-marketplace-fzc98" Dec 04 16:19:32 crc kubenswrapper[4946]: I1204 16:19:32.284475 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc922a9e-d2de-4c7b-9389-290c3cfdb297-utilities\") pod \"redhat-marketplace-fzc98\" (UID: \"cc922a9e-d2de-4c7b-9389-290c3cfdb297\") " pod="openshift-marketplace/redhat-marketplace-fzc98" Dec 04 16:19:32 crc kubenswrapper[4946]: I1204 16:19:32.284600 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc922a9e-d2de-4c7b-9389-290c3cfdb297-catalog-content\") pod 
\"redhat-marketplace-fzc98\" (UID: \"cc922a9e-d2de-4c7b-9389-290c3cfdb297\") " pod="openshift-marketplace/redhat-marketplace-fzc98" Dec 04 16:19:32 crc kubenswrapper[4946]: I1204 16:19:32.285172 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc922a9e-d2de-4c7b-9389-290c3cfdb297-catalog-content\") pod \"redhat-marketplace-fzc98\" (UID: \"cc922a9e-d2de-4c7b-9389-290c3cfdb297\") " pod="openshift-marketplace/redhat-marketplace-fzc98" Dec 04 16:19:32 crc kubenswrapper[4946]: I1204 16:19:32.285255 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc922a9e-d2de-4c7b-9389-290c3cfdb297-utilities\") pod \"redhat-marketplace-fzc98\" (UID: \"cc922a9e-d2de-4c7b-9389-290c3cfdb297\") " pod="openshift-marketplace/redhat-marketplace-fzc98" Dec 04 16:19:32 crc kubenswrapper[4946]: I1204 16:19:32.310827 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6n79\" (UniqueName: \"kubernetes.io/projected/cc922a9e-d2de-4c7b-9389-290c3cfdb297-kube-api-access-n6n79\") pod \"redhat-marketplace-fzc98\" (UID: \"cc922a9e-d2de-4c7b-9389-290c3cfdb297\") " pod="openshift-marketplace/redhat-marketplace-fzc98" Dec 04 16:19:32 crc kubenswrapper[4946]: I1204 16:19:32.429871 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fzc98" Dec 04 16:19:32 crc kubenswrapper[4946]: I1204 16:19:32.986664 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fzc98"] Dec 04 16:19:33 crc kubenswrapper[4946]: I1204 16:19:33.922106 4946 generic.go:334] "Generic (PLEG): container finished" podID="cc922a9e-d2de-4c7b-9389-290c3cfdb297" containerID="649c80679b8688fb8130a77dcedda7fb17882c364e7d1044c8d2c38d18c98122" exitCode=0 Dec 04 16:19:33 crc kubenswrapper[4946]: I1204 16:19:33.922684 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzc98" event={"ID":"cc922a9e-d2de-4c7b-9389-290c3cfdb297","Type":"ContainerDied","Data":"649c80679b8688fb8130a77dcedda7fb17882c364e7d1044c8d2c38d18c98122"} Dec 04 16:19:33 crc kubenswrapper[4946]: I1204 16:19:33.922713 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzc98" event={"ID":"cc922a9e-d2de-4c7b-9389-290c3cfdb297","Type":"ContainerStarted","Data":"d2c4e92e409b383567a12601c3ceb32ce1052d101d47eacc8eb5b071585935b4"} Dec 04 16:19:34 crc kubenswrapper[4946]: I1204 16:19:34.935298 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzc98" event={"ID":"cc922a9e-d2de-4c7b-9389-290c3cfdb297","Type":"ContainerStarted","Data":"581c804d9bbf43bde192e5531129394ced1dbde771ebbb534c493e126d5fba89"} Dec 04 16:19:35 crc kubenswrapper[4946]: I1204 16:19:35.072615 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-tjsxl_154e09f2-667a-45bf-abdb-fc3e1f0f0ba6/prometheus-operator/0.log" Dec 04 16:19:35 crc kubenswrapper[4946]: I1204 16:19:35.418665 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5665b7fdc6-g59sr_77a9c1a6-41d8-4285-a6ba-0aa3eb18fb11/prometheus-operator-admission-webhook/0.log" Dec 04 16:19:35 crc kubenswrapper[4946]: I1204 16:19:35.489154 4946 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5665b7fdc6-zrrg5_695e348c-7a92-4a69-b104-1f37361d5c49/prometheus-operator-admission-webhook/0.log" Dec 04 16:19:35 crc kubenswrapper[4946]: I1204 16:19:35.658981 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-bmpm5_adb38877-f50c-48aa-a3ca-951150033479/operator/0.log" Dec 04 16:19:35 crc kubenswrapper[4946]: I1204 16:19:35.954645 4946 generic.go:334] "Generic (PLEG): container finished" podID="cc922a9e-d2de-4c7b-9389-290c3cfdb297" containerID="581c804d9bbf43bde192e5531129394ced1dbde771ebbb534c493e126d5fba89" exitCode=0 Dec 04 16:19:35 crc kubenswrapper[4946]: I1204 16:19:35.954708 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzc98" event={"ID":"cc922a9e-d2de-4c7b-9389-290c3cfdb297","Type":"ContainerDied","Data":"581c804d9bbf43bde192e5531129394ced1dbde771ebbb534c493e126d5fba89"} Dec 04 16:19:36 crc kubenswrapper[4946]: I1204 16:19:36.628056 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-rf6pw_c9624505-3974-47fb-93d7-1a2ff73b29c7/perses-operator/0.log" Dec 04 16:19:36 crc kubenswrapper[4946]: I1204 16:19:36.968486 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzc98" event={"ID":"cc922a9e-d2de-4c7b-9389-290c3cfdb297","Type":"ContainerStarted","Data":"62b3af04fc3c541376866d48f7eefe4368329c07462c3e7684f7647ded9bfec8"} Dec 04 16:19:36 crc kubenswrapper[4946]: I1204 16:19:36.995238 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fzc98" podStartSLOduration=2.516741637 podStartE2EDuration="4.995214576s" podCreationTimestamp="2025-12-04 16:19:32 +0000 UTC" firstStartedPulling="2025-12-04 16:19:33.925900005 +0000 UTC m=+4624.811943646" lastFinishedPulling="2025-12-04 16:19:36.404372944 +0000 UTC m=+4627.290416585" observedRunningTime="2025-12-04 16:19:36.986942056 +0000 UTC m=+4627.872985707" watchObservedRunningTime="2025-12-04 16:19:36.995214576 +0000 UTC m=+4627.881258227" Dec 04 16:19:41 crc kubenswrapper[4946]: I1204 16:19:41.454191 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" Dec 04 16:19:41 crc kubenswrapper[4946]: E1204 16:19:41.455639 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:19:42 crc kubenswrapper[4946]: I1204 16:19:42.431004 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fzc98" Dec 04 16:19:42 crc kubenswrapper[4946]: I1204 16:19:42.431490 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fzc98" Dec 04 16:19:42 crc kubenswrapper[4946]: I1204 16:19:42.495392 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fzc98" Dec 04 16:19:43 crc kubenswrapper[4946]: I1204 16:19:43.112228 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/redhat-marketplace-fzc98" Dec 04 16:19:45 crc kubenswrapper[4946]: I1204 16:19:45.479365 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fzc98"] Dec 04 16:19:45 crc kubenswrapper[4946]: I1204 16:19:45.480090 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fzc98" podUID="cc922a9e-d2de-4c7b-9389-290c3cfdb297" containerName="registry-server" containerID="cri-o://62b3af04fc3c541376866d48f7eefe4368329c07462c3e7684f7647ded9bfec8" gracePeriod=2 Dec 04 16:19:46 crc kubenswrapper[4946]: I1204 16:19:46.159730 4946 generic.go:334] "Generic (PLEG): container finished" podID="cc922a9e-d2de-4c7b-9389-290c3cfdb297" containerID="62b3af04fc3c541376866d48f7eefe4368329c07462c3e7684f7647ded9bfec8" exitCode=0 Dec 04 16:19:46 crc kubenswrapper[4946]: I1204 16:19:46.160087 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzc98" event={"ID":"cc922a9e-d2de-4c7b-9389-290c3cfdb297","Type":"ContainerDied","Data":"62b3af04fc3c541376866d48f7eefe4368329c07462c3e7684f7647ded9bfec8"} Dec 04 16:19:46 crc kubenswrapper[4946]: I1204 16:19:46.382228 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fzc98" Dec 04 16:19:46 crc kubenswrapper[4946]: I1204 16:19:46.390482 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6n79\" (UniqueName: \"kubernetes.io/projected/cc922a9e-d2de-4c7b-9389-290c3cfdb297-kube-api-access-n6n79\") pod \"cc922a9e-d2de-4c7b-9389-290c3cfdb297\" (UID: \"cc922a9e-d2de-4c7b-9389-290c3cfdb297\") " Dec 04 16:19:46 crc kubenswrapper[4946]: I1204 16:19:46.390572 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc922a9e-d2de-4c7b-9389-290c3cfdb297-catalog-content\") pod \"cc922a9e-d2de-4c7b-9389-290c3cfdb297\" (UID: \"cc922a9e-d2de-4c7b-9389-290c3cfdb297\") " Dec 04 16:19:46 crc kubenswrapper[4946]: I1204 16:19:46.390637 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc922a9e-d2de-4c7b-9389-290c3cfdb297-utilities\") pod \"cc922a9e-d2de-4c7b-9389-290c3cfdb297\" (UID: \"cc922a9e-d2de-4c7b-9389-290c3cfdb297\") " Dec 04 16:19:46 crc kubenswrapper[4946]: I1204 16:19:46.391740 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc922a9e-d2de-4c7b-9389-290c3cfdb297-utilities" (OuterVolumeSpecName: "utilities") pod "cc922a9e-d2de-4c7b-9389-290c3cfdb297" (UID: "cc922a9e-d2de-4c7b-9389-290c3cfdb297"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:19:46 crc kubenswrapper[4946]: I1204 16:19:46.400059 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc922a9e-d2de-4c7b-9389-290c3cfdb297-kube-api-access-n6n79" (OuterVolumeSpecName: "kube-api-access-n6n79") pod "cc922a9e-d2de-4c7b-9389-290c3cfdb297" (UID: "cc922a9e-d2de-4c7b-9389-290c3cfdb297"). InnerVolumeSpecName "kube-api-access-n6n79". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 16:19:46 crc kubenswrapper[4946]: I1204 16:19:46.412816 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc922a9e-d2de-4c7b-9389-290c3cfdb297-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cc922a9e-d2de-4c7b-9389-290c3cfdb297" (UID: "cc922a9e-d2de-4c7b-9389-290c3cfdb297"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:19:46 crc kubenswrapper[4946]: I1204 16:19:46.494423 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc922a9e-d2de-4c7b-9389-290c3cfdb297-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 16:19:46 crc kubenswrapper[4946]: I1204 16:19:46.494760 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6n79\" (UniqueName: \"kubernetes.io/projected/cc922a9e-d2de-4c7b-9389-290c3cfdb297-kube-api-access-n6n79\") on node \"crc\" DevicePath \"\"" Dec 04 16:19:46 crc kubenswrapper[4946]: I1204 16:19:46.494774 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc922a9e-d2de-4c7b-9389-290c3cfdb297-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 16:19:47 crc kubenswrapper[4946]: I1204 16:19:47.173682 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzc98" event={"ID":"cc922a9e-d2de-4c7b-9389-290c3cfdb297","Type":"ContainerDied","Data":"d2c4e92e409b383567a12601c3ceb32ce1052d101d47eacc8eb5b071585935b4"} Dec 04 16:19:47 crc kubenswrapper[4946]: I1204 16:19:47.173780 4946 scope.go:117] "RemoveContainer" containerID="62b3af04fc3c541376866d48f7eefe4368329c07462c3e7684f7647ded9bfec8" Dec 04 16:19:47 crc kubenswrapper[4946]: I1204 16:19:47.173791 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fzc98" Dec 04 16:19:47 crc kubenswrapper[4946]: I1204 16:19:47.196177 4946 scope.go:117] "RemoveContainer" containerID="581c804d9bbf43bde192e5531129394ced1dbde771ebbb534c493e126d5fba89" Dec 04 16:19:47 crc kubenswrapper[4946]: I1204 16:19:47.215784 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fzc98"] Dec 04 16:19:47 crc kubenswrapper[4946]: I1204 16:19:47.230573 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fzc98"] Dec 04 16:19:47 crc kubenswrapper[4946]: I1204 16:19:47.232585 4946 scope.go:117] "RemoveContainer" containerID="649c80679b8688fb8130a77dcedda7fb17882c364e7d1044c8d2c38d18c98122" Dec 04 16:19:47 crc kubenswrapper[4946]: I1204 16:19:47.467930 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc922a9e-d2de-4c7b-9389-290c3cfdb297" path="/var/lib/kubelet/pods/cc922a9e-d2de-4c7b-9389-290c3cfdb297/volumes" Dec 04 16:19:52 crc kubenswrapper[4946]: I1204 16:19:52.452615 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" Dec 04 16:19:52 crc kubenswrapper[4946]: E1204 16:19:52.453972 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:19:54 crc kubenswrapper[4946]: I1204 16:19:54.829757 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-5f5b48f4dc-5fjct_14739c62-fc32-41a5-be6d-3f6673c6a231/kube-rbac-proxy/0.log" Dec 04 16:19:54 crc kubenswrapper[4946]: I1204 16:19:54.983855 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-5f5b48f4dc-5fjct_14739c62-fc32-41a5-be6d-3f6673c6a231/manager/0.log" Dec 04 16:19:56 crc kubenswrapper[4946]: I1204 16:19:56.985993 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lr92k"] Dec 04 16:19:56 crc kubenswrapper[4946]: E1204 16:19:56.987220 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc922a9e-d2de-4c7b-9389-290c3cfdb297" containerName="extract-utilities" Dec 04 16:19:56 crc kubenswrapper[4946]: I1204 16:19:56.987240 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc922a9e-d2de-4c7b-9389-290c3cfdb297" containerName="extract-utilities" Dec 04 16:19:56 crc kubenswrapper[4946]: E1204 16:19:56.987337 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc922a9e-d2de-4c7b-9389-290c3cfdb297" containerName="registry-server" Dec 04 16:19:56 crc kubenswrapper[4946]: I1204 16:19:56.987349 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc922a9e-d2de-4c7b-9389-290c3cfdb297" containerName="registry-server" Dec 04 16:19:56 crc kubenswrapper[4946]: E1204 16:19:56.987374 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc922a9e-d2de-4c7b-9389-290c3cfdb297" containerName="extract-content" Dec 04 16:19:56 crc kubenswrapper[4946]: I1204 16:19:56.987384 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc922a9e-d2de-4c7b-9389-290c3cfdb297" 
containerName="extract-content" Dec 04 16:19:56 crc kubenswrapper[4946]: I1204 16:19:56.987789 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc922a9e-d2de-4c7b-9389-290c3cfdb297" containerName="registry-server" Dec 04 16:19:56 crc kubenswrapper[4946]: I1204 16:19:56.990528 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lr92k" Dec 04 16:19:57 crc kubenswrapper[4946]: I1204 16:19:57.000946 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lr92k"] Dec 04 16:19:57 crc kubenswrapper[4946]: I1204 16:19:57.166075 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82f0606d-ae99-437e-b5cb-bc71050f438e-catalog-content\") pod \"certified-operators-lr92k\" (UID: \"82f0606d-ae99-437e-b5cb-bc71050f438e\") " pod="openshift-marketplace/certified-operators-lr92k" Dec 04 16:19:57 crc kubenswrapper[4946]: I1204 16:19:57.166242 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhhnp\" (UniqueName: \"kubernetes.io/projected/82f0606d-ae99-437e-b5cb-bc71050f438e-kube-api-access-mhhnp\") pod \"certified-operators-lr92k\" (UID: \"82f0606d-ae99-437e-b5cb-bc71050f438e\") " pod="openshift-marketplace/certified-operators-lr92k" Dec 04 16:19:57 crc kubenswrapper[4946]: I1204 16:19:57.166277 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82f0606d-ae99-437e-b5cb-bc71050f438e-utilities\") pod \"certified-operators-lr92k\" (UID: \"82f0606d-ae99-437e-b5cb-bc71050f438e\") " pod="openshift-marketplace/certified-operators-lr92k" Dec 04 16:19:57 crc kubenswrapper[4946]: I1204 16:19:57.268660 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82f0606d-ae99-437e-b5cb-bc71050f438e-catalog-content\") pod \"certified-operators-lr92k\" (UID: \"82f0606d-ae99-437e-b5cb-bc71050f438e\") " pod="openshift-marketplace/certified-operators-lr92k" Dec 04 16:19:57 crc kubenswrapper[4946]: I1204 16:19:57.268783 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhhnp\" (UniqueName: \"kubernetes.io/projected/82f0606d-ae99-437e-b5cb-bc71050f438e-kube-api-access-mhhnp\") pod \"certified-operators-lr92k\" (UID: \"82f0606d-ae99-437e-b5cb-bc71050f438e\") " pod="openshift-marketplace/certified-operators-lr92k" Dec 04 16:19:57 crc kubenswrapper[4946]: I1204 16:19:57.268822 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82f0606d-ae99-437e-b5cb-bc71050f438e-utilities\") pod \"certified-operators-lr92k\" (UID: \"82f0606d-ae99-437e-b5cb-bc71050f438e\") " pod="openshift-marketplace/certified-operators-lr92k" Dec 04 16:19:57 crc kubenswrapper[4946]: I1204 16:19:57.269272 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82f0606d-ae99-437e-b5cb-bc71050f438e-catalog-content\") pod \"certified-operators-lr92k\" (UID: \"82f0606d-ae99-437e-b5cb-bc71050f438e\") " pod="openshift-marketplace/certified-operators-lr92k" Dec 04 16:19:57 crc kubenswrapper[4946]: I1204 16:19:57.269379 4946 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82f0606d-ae99-437e-b5cb-bc71050f438e-utilities\") pod \"certified-operators-lr92k\" (UID: \"82f0606d-ae99-437e-b5cb-bc71050f438e\") " pod="openshift-marketplace/certified-operators-lr92k" Dec 04 16:19:57 crc kubenswrapper[4946]: I1204 16:19:57.288827 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhhnp\" (UniqueName: \"kubernetes.io/projected/82f0606d-ae99-437e-b5cb-bc71050f438e-kube-api-access-mhhnp\") pod \"certified-operators-lr92k\" (UID: \"82f0606d-ae99-437e-b5cb-bc71050f438e\") " pod="openshift-marketplace/certified-operators-lr92k" Dec 04 16:19:57 crc kubenswrapper[4946]: I1204 16:19:57.313998 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lr92k" Dec 04 16:19:57 crc kubenswrapper[4946]: I1204 16:19:57.945080 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lr92k"] Dec 04 16:19:58 crc kubenswrapper[4946]: I1204 16:19:58.289500 4946 generic.go:334] "Generic (PLEG): container finished" podID="82f0606d-ae99-437e-b5cb-bc71050f438e" containerID="76fecc2a0e3d8242da59a3dc5eb71681654d569b060e2545528897c2bcc7776f" exitCode=0 Dec 04 16:19:58 crc kubenswrapper[4946]: I1204 16:19:58.289959 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lr92k" event={"ID":"82f0606d-ae99-437e-b5cb-bc71050f438e","Type":"ContainerDied","Data":"76fecc2a0e3d8242da59a3dc5eb71681654d569b060e2545528897c2bcc7776f"} Dec 04 16:19:58 crc kubenswrapper[4946]: I1204 16:19:58.290004 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lr92k" event={"ID":"82f0606d-ae99-437e-b5cb-bc71050f438e","Type":"ContainerStarted","Data":"aabb031e085427a1567fc533c091c76c02636bdff20c903fb138149beaad9bd2"} Dec 04 16:20:04 crc kubenswrapper[4946]: I1204 16:20:04.453542 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" Dec 04 16:20:04 crc kubenswrapper[4946]: E1204 16:20:04.454835 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:20:09 crc kubenswrapper[4946]: E1204 16:20:09.155108 4946 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod82f0606d_ae99_437e_b5cb_bc71050f438e.slice/crio-conmon-8c633d584dc9243b0dbcf46a319d25f1a10f9748202ac866a9fb5033c31bd4b6.scope\": RecentStats: unable to find data in memory cache]" Dec 04 16:20:09 crc kubenswrapper[4946]: I1204 16:20:09.449000 4946 generic.go:334] "Generic (PLEG): container finished" podID="82f0606d-ae99-437e-b5cb-bc71050f438e" containerID="8c633d584dc9243b0dbcf46a319d25f1a10f9748202ac866a9fb5033c31bd4b6" exitCode=0 Dec 04 16:20:09 crc kubenswrapper[4946]: I1204 16:20:09.449106 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lr92k" 
event={"ID":"82f0606d-ae99-437e-b5cb-bc71050f438e","Type":"ContainerDied","Data":"8c633d584dc9243b0dbcf46a319d25f1a10f9748202ac866a9fb5033c31bd4b6"} Dec 04 16:20:10 crc kubenswrapper[4946]: I1204 16:20:10.466198 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lr92k" event={"ID":"82f0606d-ae99-437e-b5cb-bc71050f438e","Type":"ContainerStarted","Data":"3847ce149469e946e0bcf4d07d6553856f3dbc1eb331cbbc0f88d4c15d994874"} Dec 04 16:20:10 crc kubenswrapper[4946]: I1204 16:20:10.496261 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lr92k" podStartSLOduration=2.842230254 podStartE2EDuration="14.496231052s" podCreationTimestamp="2025-12-04 16:19:56 +0000 UTC" firstStartedPulling="2025-12-04 16:19:58.292311808 +0000 UTC m=+4649.178355439" lastFinishedPulling="2025-12-04 16:20:09.946312596 +0000 UTC m=+4660.832356237" observedRunningTime="2025-12-04 16:20:10.48991201 +0000 UTC m=+4661.375955661" watchObservedRunningTime="2025-12-04 16:20:10.496231052 +0000 UTC m=+4661.382274693" Dec 04 16:20:17 crc kubenswrapper[4946]: I1204 16:20:17.316230 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lr92k" Dec 04 16:20:17 crc kubenswrapper[4946]: I1204 16:20:17.317107 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lr92k" Dec 04 16:20:17 crc kubenswrapper[4946]: I1204 16:20:17.373966 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lr92k" Dec 04 16:20:18 crc kubenswrapper[4946]: I1204 16:20:18.186435 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lr92k" Dec 04 16:20:18 crc kubenswrapper[4946]: I1204 16:20:18.287548 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lr92k"] Dec 04 16:20:18 crc kubenswrapper[4946]: I1204 16:20:18.369483 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l6sf5"] Dec 04 16:20:18 crc kubenswrapper[4946]: I1204 16:20:18.369761 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-l6sf5" podUID="3fcf10a5-8a06-4542-9839-91e2881b5a5e" containerName="registry-server" containerID="cri-o://837bd62a27c153a920de1b911c25e1dac4784a0f9ee8e508959d780b25fdb8e1" gracePeriod=2 Dec 04 16:20:18 crc kubenswrapper[4946]: I1204 16:20:18.626627 4946 generic.go:334] "Generic (PLEG): container finished" podID="3fcf10a5-8a06-4542-9839-91e2881b5a5e" containerID="837bd62a27c153a920de1b911c25e1dac4784a0f9ee8e508959d780b25fdb8e1" exitCode=0 Dec 04 16:20:18 crc kubenswrapper[4946]: I1204 16:20:18.626759 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l6sf5" event={"ID":"3fcf10a5-8a06-4542-9839-91e2881b5a5e","Type":"ContainerDied","Data":"837bd62a27c153a920de1b911c25e1dac4784a0f9ee8e508959d780b25fdb8e1"} Dec 04 16:20:19 crc kubenswrapper[4946]: I1204 16:20:19.469148 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" Dec 04 16:20:19 crc kubenswrapper[4946]: E1204 16:20:19.471411 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:20:19 crc kubenswrapper[4946]: I1204 16:20:19.525806 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l6sf5" Dec 04 16:20:19 crc kubenswrapper[4946]: I1204 16:20:19.566472 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fcf10a5-8a06-4542-9839-91e2881b5a5e-utilities\") pod \"3fcf10a5-8a06-4542-9839-91e2881b5a5e\" (UID: \"3fcf10a5-8a06-4542-9839-91e2881b5a5e\") " Dec 04 16:20:19 crc kubenswrapper[4946]: I1204 16:20:19.566592 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4l79f\" (UniqueName: \"kubernetes.io/projected/3fcf10a5-8a06-4542-9839-91e2881b5a5e-kube-api-access-4l79f\") pod \"3fcf10a5-8a06-4542-9839-91e2881b5a5e\" (UID: \"3fcf10a5-8a06-4542-9839-91e2881b5a5e\") " Dec 04 16:20:19 crc kubenswrapper[4946]: I1204 16:20:19.566821 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fcf10a5-8a06-4542-9839-91e2881b5a5e-catalog-content\") pod \"3fcf10a5-8a06-4542-9839-91e2881b5a5e\" (UID: \"3fcf10a5-8a06-4542-9839-91e2881b5a5e\") " Dec 04 16:20:19 crc kubenswrapper[4946]: I1204 16:20:19.571293 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fcf10a5-8a06-4542-9839-91e2881b5a5e-utilities" (OuterVolumeSpecName: "utilities") pod "3fcf10a5-8a06-4542-9839-91e2881b5a5e" (UID: "3fcf10a5-8a06-4542-9839-91e2881b5a5e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:20:19 crc kubenswrapper[4946]: I1204 16:20:19.576300 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fcf10a5-8a06-4542-9839-91e2881b5a5e-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 16:20:19 crc kubenswrapper[4946]: I1204 16:20:19.622431 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fcf10a5-8a06-4542-9839-91e2881b5a5e-kube-api-access-4l79f" (OuterVolumeSpecName: "kube-api-access-4l79f") pod "3fcf10a5-8a06-4542-9839-91e2881b5a5e" (UID: "3fcf10a5-8a06-4542-9839-91e2881b5a5e"). InnerVolumeSpecName "kube-api-access-4l79f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 16:20:19 crc kubenswrapper[4946]: I1204 16:20:19.656398 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l6sf5" Dec 04 16:20:19 crc kubenswrapper[4946]: I1204 16:20:19.656503 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l6sf5" event={"ID":"3fcf10a5-8a06-4542-9839-91e2881b5a5e","Type":"ContainerDied","Data":"3e560638d52f9384f04418e922057c7fd7abaae33aadc4528712ea933471999f"} Dec 04 16:20:19 crc kubenswrapper[4946]: I1204 16:20:19.656545 4946 scope.go:117] "RemoveContainer" containerID="837bd62a27c153a920de1b911c25e1dac4784a0f9ee8e508959d780b25fdb8e1" Dec 04 16:20:19 crc kubenswrapper[4946]: I1204 16:20:19.711146 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4l79f\" (UniqueName: \"kubernetes.io/projected/3fcf10a5-8a06-4542-9839-91e2881b5a5e-kube-api-access-4l79f\") on node \"crc\" DevicePath \"\"" Dec 04 16:20:19 crc kubenswrapper[4946]: I1204 16:20:19.740777 4946 scope.go:117] "RemoveContainer" containerID="63e6ac2f473f65dd76e3a95644e80f7763ad3935b3d777c3330466255b9de868" Dec 04 16:20:19 crc kubenswrapper[4946]: I1204 16:20:19.991872 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fcf10a5-8a06-4542-9839-91e2881b5a5e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3fcf10a5-8a06-4542-9839-91e2881b5a5e" (UID: "3fcf10a5-8a06-4542-9839-91e2881b5a5e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:20:20 crc kubenswrapper[4946]: I1204 16:20:20.022701 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fcf10a5-8a06-4542-9839-91e2881b5a5e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 16:20:20 crc kubenswrapper[4946]: I1204 16:20:20.030538 4946 scope.go:117] "RemoveContainer" containerID="b2727b722e3a70afd501ccad98a0252f007abdf45aee111a177319c19bc76edf" Dec 04 16:20:20 crc kubenswrapper[4946]: I1204 16:20:20.062206 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l6sf5"] Dec 04 16:20:20 crc kubenswrapper[4946]: I1204 16:20:20.083961 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-l6sf5"] Dec 04 16:20:21 crc kubenswrapper[4946]: I1204 16:20:21.468626 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fcf10a5-8a06-4542-9839-91e2881b5a5e" path="/var/lib/kubelet/pods/3fcf10a5-8a06-4542-9839-91e2881b5a5e/volumes" Dec 04 16:20:30 crc kubenswrapper[4946]: I1204 16:20:30.453846 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" Dec 04 16:20:30 crc kubenswrapper[4946]: E1204 16:20:30.456360 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:20:42 crc kubenswrapper[4946]: I1204 16:20:42.453893 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" Dec 04 16:20:42 crc kubenswrapper[4946]: E1204 16:20:42.454961 4946 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-qhv79_openshift-machine-config-operator(1f47d6bc-3d05-4c97-902f-5714244b2a1c)\"" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" Dec 04 16:20:54 crc kubenswrapper[4946]: I1204 16:20:54.083232 4946 scope.go:117] "RemoveContainer" containerID="9a22eb297c8344d6814f798e89dfa5a6e6157b5e77e80c321fef37714283ab06" Dec 04 16:20:54 crc kubenswrapper[4946]: I1204 16:20:54.452822 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" Dec 04 16:20:55 crc kubenswrapper[4946]: I1204 16:20:55.125318 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"5064f18ce83e706e3742b0f7eae74a984336a7d517d1d8af7f5888dc5a1ee324"} Dec 04 16:21:54 crc kubenswrapper[4946]: I1204 16:21:54.167884 4946 scope.go:117] "RemoveContainer" containerID="fb11d1dc211c9a27d9e9fcb88d51b26e81a157a94e489a4a46590848220dd675" Dec 04 16:22:05 crc kubenswrapper[4946]: I1204 16:22:05.112771 4946 generic.go:334] "Generic (PLEG): container finished" podID="40d14c1e-d816-4bcb-a55f-0be1ddb906f2" containerID="6e95e1a96f7b3326777e4039ac2ededd4eba7438fb89e0fa2988abf5694584ed" exitCode=0 Dec 04 16:22:05 crc kubenswrapper[4946]: I1204 16:22:05.112910 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tbv2b/must-gather-zn5k2" event={"ID":"40d14c1e-d816-4bcb-a55f-0be1ddb906f2","Type":"ContainerDied","Data":"6e95e1a96f7b3326777e4039ac2ededd4eba7438fb89e0fa2988abf5694584ed"} Dec 04 16:22:05 crc kubenswrapper[4946]: I1204 16:22:05.114335 4946 scope.go:117] "RemoveContainer" containerID="6e95e1a96f7b3326777e4039ac2ededd4eba7438fb89e0fa2988abf5694584ed" Dec 04 16:22:05 crc kubenswrapper[4946]: I1204 16:22:05.832737 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-tbv2b_must-gather-zn5k2_40d14c1e-d816-4bcb-a55f-0be1ddb906f2/gather/0.log" Dec 04 16:22:16 crc kubenswrapper[4946]: I1204 16:22:16.575556 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-tbv2b/must-gather-zn5k2"] Dec 04 16:22:16 crc kubenswrapper[4946]: I1204 16:22:16.576773 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-tbv2b/must-gather-zn5k2" podUID="40d14c1e-d816-4bcb-a55f-0be1ddb906f2" containerName="copy" containerID="cri-o://0b55d21fcc1ef16f81b60bb70e3a722dce3823eb61e29af6b6f2a31429fa167b" gracePeriod=2 Dec 04 16:22:16 crc kubenswrapper[4946]: I1204 16:22:16.585591 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-tbv2b/must-gather-zn5k2"] Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.049200 4946 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2l7wh"] Dec 04 16:22:17 crc kubenswrapper[4946]: E1204 16:22:17.050827 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fcf10a5-8a06-4542-9839-91e2881b5a5e" containerName="extract-utilities" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.050851 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fcf10a5-8a06-4542-9839-91e2881b5a5e" containerName="extract-utilities" Dec 04 16:22:17 crc kubenswrapper[4946]: E1204 16:22:17.050874 4946 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="40d14c1e-d816-4bcb-a55f-0be1ddb906f2" containerName="gather" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.050882 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="40d14c1e-d816-4bcb-a55f-0be1ddb906f2" containerName="gather" Dec 04 16:22:17 crc kubenswrapper[4946]: E1204 16:22:17.050896 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fcf10a5-8a06-4542-9839-91e2881b5a5e" containerName="extract-content" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.050905 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fcf10a5-8a06-4542-9839-91e2881b5a5e" containerName="extract-content" Dec 04 16:22:17 crc kubenswrapper[4946]: E1204 16:22:17.050934 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fcf10a5-8a06-4542-9839-91e2881b5a5e" containerName="registry-server" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.050942 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fcf10a5-8a06-4542-9839-91e2881b5a5e" containerName="registry-server" Dec 04 16:22:17 crc kubenswrapper[4946]: E1204 16:22:17.050956 4946 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40d14c1e-d816-4bcb-a55f-0be1ddb906f2" containerName="copy" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.050964 4946 state_mem.go:107] "Deleted CPUSet assignment" podUID="40d14c1e-d816-4bcb-a55f-0be1ddb906f2" containerName="copy" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.051251 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fcf10a5-8a06-4542-9839-91e2881b5a5e" containerName="registry-server" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.051283 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="40d14c1e-d816-4bcb-a55f-0be1ddb906f2" containerName="copy" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.051294 4946 memory_manager.go:354] "RemoveStaleState removing state" podUID="40d14c1e-d816-4bcb-a55f-0be1ddb906f2" containerName="gather" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.053219 4946 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2l7wh" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.059988 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2l7wh"] Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.167524 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/901e786b-bcbb-4acc-9ec0-617a75f42c14-catalog-content\") pod \"community-operators-2l7wh\" (UID: \"901e786b-bcbb-4acc-9ec0-617a75f42c14\") " pod="openshift-marketplace/community-operators-2l7wh" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.167645 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxzw2\" (UniqueName: \"kubernetes.io/projected/901e786b-bcbb-4acc-9ec0-617a75f42c14-kube-api-access-qxzw2\") pod \"community-operators-2l7wh\" (UID: \"901e786b-bcbb-4acc-9ec0-617a75f42c14\") " pod="openshift-marketplace/community-operators-2l7wh" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.167731 4946 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/901e786b-bcbb-4acc-9ec0-617a75f42c14-utilities\") pod \"community-operators-2l7wh\" (UID: \"901e786b-bcbb-4acc-9ec0-617a75f42c14\") " pod="openshift-marketplace/community-operators-2l7wh" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.276035 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/901e786b-bcbb-4acc-9ec0-617a75f42c14-catalog-content\") pod \"community-operators-2l7wh\" (UID: \"901e786b-bcbb-4acc-9ec0-617a75f42c14\") " pod="openshift-marketplace/community-operators-2l7wh" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.276180 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxzw2\" (UniqueName: \"kubernetes.io/projected/901e786b-bcbb-4acc-9ec0-617a75f42c14-kube-api-access-qxzw2\") pod \"community-operators-2l7wh\" (UID: \"901e786b-bcbb-4acc-9ec0-617a75f42c14\") " pod="openshift-marketplace/community-operators-2l7wh" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.276248 4946 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/901e786b-bcbb-4acc-9ec0-617a75f42c14-utilities\") pod \"community-operators-2l7wh\" (UID: \"901e786b-bcbb-4acc-9ec0-617a75f42c14\") " pod="openshift-marketplace/community-operators-2l7wh" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.276917 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/901e786b-bcbb-4acc-9ec0-617a75f42c14-utilities\") pod \"community-operators-2l7wh\" (UID: \"901e786b-bcbb-4acc-9ec0-617a75f42c14\") " pod="openshift-marketplace/community-operators-2l7wh" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.277204 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/901e786b-bcbb-4acc-9ec0-617a75f42c14-catalog-content\") pod \"community-operators-2l7wh\" (UID: \"901e786b-bcbb-4acc-9ec0-617a75f42c14\") " pod="openshift-marketplace/community-operators-2l7wh" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.323696 4946 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-must-gather-tbv2b_must-gather-zn5k2_40d14c1e-d816-4bcb-a55f-0be1ddb906f2/copy/0.log" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.314972 4946 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-tbv2b_must-gather-zn5k2_40d14c1e-d816-4bcb-a55f-0be1ddb906f2/copy/0.log" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.327531 4946 generic.go:334] "Generic (PLEG): container finished" podID="40d14c1e-d816-4bcb-a55f-0be1ddb906f2" containerID="0b55d21fcc1ef16f81b60bb70e3a722dce3823eb61e29af6b6f2a31429fa167b" exitCode=143 Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.327592 4946 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d054020738442d065dd79b3bf9020c1af4eed70c3a1c54114eac180e99ddb016" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.327634 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tbv2b/must-gather-zn5k2" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.361526 4946 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxzw2\" (UniqueName: \"kubernetes.io/projected/901e786b-bcbb-4acc-9ec0-617a75f42c14-kube-api-access-qxzw2\") pod \"community-operators-2l7wh\" (UID: \"901e786b-bcbb-4acc-9ec0-617a75f42c14\") " pod="openshift-marketplace/community-operators-2l7wh" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.420086 4946 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2l7wh" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.480447 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/40d14c1e-d816-4bcb-a55f-0be1ddb906f2-must-gather-output\") pod \"40d14c1e-d816-4bcb-a55f-0be1ddb906f2\" (UID: \"40d14c1e-d816-4bcb-a55f-0be1ddb906f2\") " Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.481486 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2b94q\" (UniqueName: \"kubernetes.io/projected/40d14c1e-d816-4bcb-a55f-0be1ddb906f2-kube-api-access-2b94q\") pod \"40d14c1e-d816-4bcb-a55f-0be1ddb906f2\" (UID: \"40d14c1e-d816-4bcb-a55f-0be1ddb906f2\") " Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.490585 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40d14c1e-d816-4bcb-a55f-0be1ddb906f2-kube-api-access-2b94q" (OuterVolumeSpecName: "kube-api-access-2b94q") pod "40d14c1e-d816-4bcb-a55f-0be1ddb906f2" (UID: "40d14c1e-d816-4bcb-a55f-0be1ddb906f2"). InnerVolumeSpecName "kube-api-access-2b94q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.587512 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2b94q\" (UniqueName: \"kubernetes.io/projected/40d14c1e-d816-4bcb-a55f-0be1ddb906f2-kube-api-access-2b94q\") on node \"crc\" DevicePath \"\"" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.746608 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40d14c1e-d816-4bcb-a55f-0be1ddb906f2-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "40d14c1e-d816-4bcb-a55f-0be1ddb906f2" (UID: "40d14c1e-d816-4bcb-a55f-0be1ddb906f2"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.797638 4946 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/40d14c1e-d816-4bcb-a55f-0be1ddb906f2-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 04 16:22:17 crc kubenswrapper[4946]: I1204 16:22:17.972811 4946 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2l7wh"] Dec 04 16:22:18 crc kubenswrapper[4946]: I1204 16:22:18.341977 4946 generic.go:334] "Generic (PLEG): container finished" podID="901e786b-bcbb-4acc-9ec0-617a75f42c14" containerID="a5c3efd4d879233088fe322a6f5e4d6b1da9c49da2c0b2df7b4ef276627fb6d2" exitCode=0 Dec 04 16:22:18 crc kubenswrapper[4946]: I1204 16:22:18.342072 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tbv2b/must-gather-zn5k2" Dec 04 16:22:18 crc kubenswrapper[4946]: I1204 16:22:18.342065 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2l7wh" event={"ID":"901e786b-bcbb-4acc-9ec0-617a75f42c14","Type":"ContainerDied","Data":"a5c3efd4d879233088fe322a6f5e4d6b1da9c49da2c0b2df7b4ef276627fb6d2"} Dec 04 16:22:18 crc kubenswrapper[4946]: I1204 16:22:18.342169 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2l7wh" event={"ID":"901e786b-bcbb-4acc-9ec0-617a75f42c14","Type":"ContainerStarted","Data":"c07be1e71d6bab2519c647fa2a4dc158491f29bf1d4fd3ea594dcf4e613b7fb0"} Dec 04 16:22:19 crc kubenswrapper[4946]: I1204 16:22:19.471220 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40d14c1e-d816-4bcb-a55f-0be1ddb906f2" path="/var/lib/kubelet/pods/40d14c1e-d816-4bcb-a55f-0be1ddb906f2/volumes" Dec 04 16:22:20 crc kubenswrapper[4946]: I1204 16:22:20.367385 4946 generic.go:334] "Generic (PLEG): container finished" podID="901e786b-bcbb-4acc-9ec0-617a75f42c14" containerID="a646ffdb02db0cca997fa754440a1d8f24c77323de271e9826e6d3c57382ca59" exitCode=0 Dec 04 16:22:20 crc kubenswrapper[4946]: I1204 16:22:20.367460 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2l7wh" event={"ID":"901e786b-bcbb-4acc-9ec0-617a75f42c14","Type":"ContainerDied","Data":"a646ffdb02db0cca997fa754440a1d8f24c77323de271e9826e6d3c57382ca59"} Dec 04 16:22:21 crc kubenswrapper[4946]: I1204 16:22:21.382132 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2l7wh" event={"ID":"901e786b-bcbb-4acc-9ec0-617a75f42c14","Type":"ContainerStarted","Data":"bfa980dbe2c34e3b452f16585c3942d6aa7304e6361b9a582d8b098cff00b7cc"} Dec 04 16:22:21 crc kubenswrapper[4946]: I1204 16:22:21.410128 4946 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2l7wh" podStartSLOduration=2.948180013 podStartE2EDuration="5.410090149s" podCreationTimestamp="2025-12-04 16:22:16 +0000 UTC" firstStartedPulling="2025-12-04 16:22:18.344734669 +0000 UTC m=+4789.230778300" lastFinishedPulling="2025-12-04 16:22:20.806644795 +0000 UTC m=+4791.692688436" observedRunningTime="2025-12-04 16:22:21.404669511 +0000 UTC m=+4792.290713162" watchObservedRunningTime="2025-12-04 16:22:21.410090149 +0000 UTC m=+4792.296133790" Dec 04 16:22:27 crc kubenswrapper[4946]: I1204 16:22:27.421251 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/community-operators-2l7wh" Dec 04 16:22:27 crc kubenswrapper[4946]: I1204 16:22:27.422143 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2l7wh" Dec 04 16:22:28 crc kubenswrapper[4946]: I1204 16:22:28.121967 4946 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2l7wh" Dec 04 16:22:28 crc kubenswrapper[4946]: I1204 16:22:28.172645 4946 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2l7wh" Dec 04 16:22:28 crc kubenswrapper[4946]: I1204 16:22:28.370236 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2l7wh"] Dec 04 16:22:29 crc kubenswrapper[4946]: I1204 16:22:29.497413 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2l7wh" podUID="901e786b-bcbb-4acc-9ec0-617a75f42c14" containerName="registry-server" containerID="cri-o://bfa980dbe2c34e3b452f16585c3942d6aa7304e6361b9a582d8b098cff00b7cc" gracePeriod=2 Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.151233 4946 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2l7wh" Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.273281 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxzw2\" (UniqueName: \"kubernetes.io/projected/901e786b-bcbb-4acc-9ec0-617a75f42c14-kube-api-access-qxzw2\") pod \"901e786b-bcbb-4acc-9ec0-617a75f42c14\" (UID: \"901e786b-bcbb-4acc-9ec0-617a75f42c14\") " Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.273687 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/901e786b-bcbb-4acc-9ec0-617a75f42c14-catalog-content\") pod \"901e786b-bcbb-4acc-9ec0-617a75f42c14\" (UID: \"901e786b-bcbb-4acc-9ec0-617a75f42c14\") " Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.273859 4946 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/901e786b-bcbb-4acc-9ec0-617a75f42c14-utilities\") pod \"901e786b-bcbb-4acc-9ec0-617a75f42c14\" (UID: \"901e786b-bcbb-4acc-9ec0-617a75f42c14\") " Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.274554 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/901e786b-bcbb-4acc-9ec0-617a75f42c14-utilities" (OuterVolumeSpecName: "utilities") pod "901e786b-bcbb-4acc-9ec0-617a75f42c14" (UID: "901e786b-bcbb-4acc-9ec0-617a75f42c14"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.286572 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/901e786b-bcbb-4acc-9ec0-617a75f42c14-kube-api-access-qxzw2" (OuterVolumeSpecName: "kube-api-access-qxzw2") pod "901e786b-bcbb-4acc-9ec0-617a75f42c14" (UID: "901e786b-bcbb-4acc-9ec0-617a75f42c14"). InnerVolumeSpecName "kube-api-access-qxzw2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.333368 4946 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/901e786b-bcbb-4acc-9ec0-617a75f42c14-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "901e786b-bcbb-4acc-9ec0-617a75f42c14" (UID: "901e786b-bcbb-4acc-9ec0-617a75f42c14"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.376891 4946 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxzw2\" (UniqueName: \"kubernetes.io/projected/901e786b-bcbb-4acc-9ec0-617a75f42c14-kube-api-access-qxzw2\") on node \"crc\" DevicePath \"\"" Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.376938 4946 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/901e786b-bcbb-4acc-9ec0-617a75f42c14-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.376951 4946 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/901e786b-bcbb-4acc-9ec0-617a75f42c14-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.511282 4946 generic.go:334] "Generic (PLEG): container finished" podID="901e786b-bcbb-4acc-9ec0-617a75f42c14" containerID="bfa980dbe2c34e3b452f16585c3942d6aa7304e6361b9a582d8b098cff00b7cc" exitCode=0 Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.511341 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2l7wh" event={"ID":"901e786b-bcbb-4acc-9ec0-617a75f42c14","Type":"ContainerDied","Data":"bfa980dbe2c34e3b452f16585c3942d6aa7304e6361b9a582d8b098cff00b7cc"} Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.511382 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2l7wh" event={"ID":"901e786b-bcbb-4acc-9ec0-617a75f42c14","Type":"ContainerDied","Data":"c07be1e71d6bab2519c647fa2a4dc158491f29bf1d4fd3ea594dcf4e613b7fb0"} Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.511384 4946 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2l7wh" Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.511409 4946 scope.go:117] "RemoveContainer" containerID="bfa980dbe2c34e3b452f16585c3942d6aa7304e6361b9a582d8b098cff00b7cc" Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.538092 4946 scope.go:117] "RemoveContainer" containerID="a646ffdb02db0cca997fa754440a1d8f24c77323de271e9826e6d3c57382ca59" Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.559352 4946 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2l7wh"] Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.571629 4946 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2l7wh"] Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.578972 4946 scope.go:117] "RemoveContainer" containerID="a5c3efd4d879233088fe322a6f5e4d6b1da9c49da2c0b2df7b4ef276627fb6d2" Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.641774 4946 scope.go:117] "RemoveContainer" containerID="bfa980dbe2c34e3b452f16585c3942d6aa7304e6361b9a582d8b098cff00b7cc" Dec 04 16:22:30 crc kubenswrapper[4946]: E1204 16:22:30.642438 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfa980dbe2c34e3b452f16585c3942d6aa7304e6361b9a582d8b098cff00b7cc\": container with ID starting with bfa980dbe2c34e3b452f16585c3942d6aa7304e6361b9a582d8b098cff00b7cc not found: ID does not exist" containerID="bfa980dbe2c34e3b452f16585c3942d6aa7304e6361b9a582d8b098cff00b7cc" Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.642493 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfa980dbe2c34e3b452f16585c3942d6aa7304e6361b9a582d8b098cff00b7cc"} err="failed to get container status \"bfa980dbe2c34e3b452f16585c3942d6aa7304e6361b9a582d8b098cff00b7cc\": rpc error: code = NotFound desc = could not find container \"bfa980dbe2c34e3b452f16585c3942d6aa7304e6361b9a582d8b098cff00b7cc\": container with ID starting with bfa980dbe2c34e3b452f16585c3942d6aa7304e6361b9a582d8b098cff00b7cc not found: ID does not exist" Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.642533 4946 scope.go:117] "RemoveContainer" containerID="a646ffdb02db0cca997fa754440a1d8f24c77323de271e9826e6d3c57382ca59" Dec 04 16:22:30 crc kubenswrapper[4946]: E1204 16:22:30.643216 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a646ffdb02db0cca997fa754440a1d8f24c77323de271e9826e6d3c57382ca59\": container with ID starting with a646ffdb02db0cca997fa754440a1d8f24c77323de271e9826e6d3c57382ca59 not found: ID does not exist" containerID="a646ffdb02db0cca997fa754440a1d8f24c77323de271e9826e6d3c57382ca59" Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.643260 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a646ffdb02db0cca997fa754440a1d8f24c77323de271e9826e6d3c57382ca59"} err="failed to get container status \"a646ffdb02db0cca997fa754440a1d8f24c77323de271e9826e6d3c57382ca59\": rpc error: code = NotFound desc = could not find container \"a646ffdb02db0cca997fa754440a1d8f24c77323de271e9826e6d3c57382ca59\": container with ID starting with a646ffdb02db0cca997fa754440a1d8f24c77323de271e9826e6d3c57382ca59 not found: ID does not exist" Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.643289 4946 scope.go:117] "RemoveContainer" 
containerID="a5c3efd4d879233088fe322a6f5e4d6b1da9c49da2c0b2df7b4ef276627fb6d2" Dec 04 16:22:30 crc kubenswrapper[4946]: E1204 16:22:30.643550 4946 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5c3efd4d879233088fe322a6f5e4d6b1da9c49da2c0b2df7b4ef276627fb6d2\": container with ID starting with a5c3efd4d879233088fe322a6f5e4d6b1da9c49da2c0b2df7b4ef276627fb6d2 not found: ID does not exist" containerID="a5c3efd4d879233088fe322a6f5e4d6b1da9c49da2c0b2df7b4ef276627fb6d2" Dec 04 16:22:30 crc kubenswrapper[4946]: I1204 16:22:30.643570 4946 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5c3efd4d879233088fe322a6f5e4d6b1da9c49da2c0b2df7b4ef276627fb6d2"} err="failed to get container status \"a5c3efd4d879233088fe322a6f5e4d6b1da9c49da2c0b2df7b4ef276627fb6d2\": rpc error: code = NotFound desc = could not find container \"a5c3efd4d879233088fe322a6f5e4d6b1da9c49da2c0b2df7b4ef276627fb6d2\": container with ID starting with a5c3efd4d879233088fe322a6f5e4d6b1da9c49da2c0b2df7b4ef276627fb6d2 not found: ID does not exist" Dec 04 16:22:31 crc kubenswrapper[4946]: I1204 16:22:31.465772 4946 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="901e786b-bcbb-4acc-9ec0-617a75f42c14" path="/var/lib/kubelet/pods/901e786b-bcbb-4acc-9ec0-617a75f42c14/volumes" Dec 04 16:22:54 crc kubenswrapper[4946]: I1204 16:22:54.297397 4946 scope.go:117] "RemoveContainer" containerID="6e95e1a96f7b3326777e4039ac2ededd4eba7438fb89e0fa2988abf5694584ed" Dec 04 16:22:54 crc kubenswrapper[4946]: I1204 16:22:54.397384 4946 scope.go:117] "RemoveContainer" containerID="0b55d21fcc1ef16f81b60bb70e3a722dce3823eb61e29af6b6f2a31429fa167b" Dec 04 16:23:22 crc kubenswrapper[4946]: I1204 16:23:22.478074 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 16:23:22 crc kubenswrapper[4946]: I1204 16:23:22.481201 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 16:23:52 crc kubenswrapper[4946]: I1204 16:23:52.478531 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 16:23:52 crc kubenswrapper[4946]: I1204 16:23:52.480556 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 16:24:22 crc kubenswrapper[4946]: I1204 16:24:22.478967 4946 patch_prober.go:28] interesting pod/machine-config-daemon-qhv79 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" start-of-body= Dec 04 16:24:22 crc kubenswrapper[4946]: I1204 16:24:22.479967 4946 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 16:24:22 crc kubenswrapper[4946]: I1204 16:24:22.480048 4946 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" Dec 04 16:24:22 crc kubenswrapper[4946]: I1204 16:24:22.481455 4946 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5064f18ce83e706e3742b0f7eae74a984336a7d517d1d8af7f5888dc5a1ee324"} pod="openshift-machine-config-operator/machine-config-daemon-qhv79" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 16:24:22 crc kubenswrapper[4946]: I1204 16:24:22.481790 4946 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" podUID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerName="machine-config-daemon" containerID="cri-o://5064f18ce83e706e3742b0f7eae74a984336a7d517d1d8af7f5888dc5a1ee324" gracePeriod=600 Dec 04 16:24:23 crc kubenswrapper[4946]: I1204 16:24:23.075804 4946 generic.go:334] "Generic (PLEG): container finished" podID="1f47d6bc-3d05-4c97-902f-5714244b2a1c" containerID="5064f18ce83e706e3742b0f7eae74a984336a7d517d1d8af7f5888dc5a1ee324" exitCode=0 Dec 04 16:24:23 crc kubenswrapper[4946]: I1204 16:24:23.076057 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerDied","Data":"5064f18ce83e706e3742b0f7eae74a984336a7d517d1d8af7f5888dc5a1ee324"} Dec 04 16:24:23 crc kubenswrapper[4946]: I1204 16:24:23.076898 4946 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-qhv79" event={"ID":"1f47d6bc-3d05-4c97-902f-5714244b2a1c","Type":"ContainerStarted","Data":"61ba2945a14fcf3d88a6e0212c83fb2e5b5e249bd49687a34a1e131bad2fdb1e"} Dec 04 16:24:23 crc kubenswrapper[4946]: I1204 16:24:23.076935 4946 scope.go:117] "RemoveContainer" containerID="044036d82ec7b33e7957f4d3c467600b36d0d136fd64ab07b550ac1154938eb3" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515114333035024443 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015114333035017360 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015114320642016503 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015114320642015453 5ustar corecore